From 4dbed1a94c312eda2b4fc1045925dfcfe96edc3e Mon Sep 17 00:00:00 2001
From: Gabriel Igliozzi
Date: Wed, 13 Aug 2025 12:30:45 -0700
Subject: [PATCH] Revert "0.9.1 release"

---
 .github/ISSUE_TEMPLATE/iceberg_bug_report.yml    |    4 +-
 .github/dependabot.yml                           |    4 +-
 .github/pull_request_template.md                 |   14 -
 .github/workflows/license_check.yml              |    2 +-
 .github/workflows/nightly-pypi-build.yml         |    4 +-
 .github/workflows/pypi-build-artifacts.yml       |    4 +-
 .github/workflows/python-ci-docs.yml             |    2 +-
 .github/workflows/python-ci.yml                  |   54 +-
 .github/workflows/python-integration.yml         |   59 +
 .github/workflows/python-release-docs.yml        |    2 +-
 .github/workflows/python-release.yml             |    2 +-
 .github/workflows/svn-build-artifacts.yml        |    4 +-
 .gitignore                                       |    1 -
 .pre-commit-config.yaml                          |    4 +-
 Makefile                                         |  143 +-
 dev/.rat-excludes                                |    1 -
 dev/Dockerfile                                   |   10 +-
 dev/docker-compose-azurite.yml                   |    2 +-
 dev/docker-compose-integration.yml               |    3 +-
 dev/docker-compose.yml                           |    2 +-
 dev/hive/core-site.xml                           |    5 -
 dev/provision.py                                 |  170 +-
 mkdocs/docs/SUMMARY.md                           |    2 -
 mkdocs/docs/api.md                               |  299 +-
 mkdocs/docs/cli.md                               |   16 -
 mkdocs/docs/community.md                         |    2 +-
 mkdocs/docs/configuration.md                     |  273 +-
 mkdocs/docs/contributing.md                      |   12 +-
 mkdocs/docs/expression-dsl.md                    |  244 -
 mkdocs/docs/how-to-release.md                    |    2 -
 mkdocs/docs/index.md                             |    3 +-
 mkdocs/docs/row-filter-syntax.md                 |  175 -
 poetry.lock                                      | 5206 +++++++----------
 pyiceberg/__init__.py                            |    2 +-
 pyiceberg/avro/codecs/__init__.py                |   13 +-
 pyiceberg/avro/codecs/snappy_codec.py            |    4 +-
 pyiceberg/avro/codecs/zstandard_codec.py         |    2 +-
 pyiceberg/avro/encoder.py                        |    4 -
 pyiceberg/avro/file.py                           |   64 +-
 pyiceberg/avro/reader.py                         |   55 +-
 pyiceberg/avro/resolver.py                       |   38 +-
 pyiceberg/avro/writer.py                         |   26 +-
 pyiceberg/catalog/__init__.py                    |   37 +-
 pyiceberg/catalog/dynamodb.py                    |   40 +-
 pyiceberg/catalog/glue.py                        |  163 +-
 pyiceberg/catalog/hive.py                        |   96 +-
 pyiceberg/catalog/memory.py                      |    5 +-
 pyiceberg/catalog/{rest/__init__.py => rest.py}  |  274 +-
 pyiceberg/catalog/rest/auth.py                   |  231 -
 pyiceberg/catalog/rest/response.py               |  111 -
 pyiceberg/catalog/sql.py                         |   24 +-
 pyiceberg/cli/console.py                         |   24 +-
 pyiceberg/conversions.py                         |  277 +-
 pyiceberg/exceptions.py                          |    4 -
 pyiceberg/expressions/__init__.py                |   61 +-
 pyiceberg/expressions/literals.py                |   27 +-
 pyiceberg/expressions/parser.py                  |   24 +-
 pyiceberg/expressions/visitors.py                |   42 +-
 pyiceberg/io/__init__.py                         |   27 +-
 pyiceberg/io/fsspec.py                           |   32 +-
 pyiceberg/io/pyarrow.py                          |  676 +--
 pyiceberg/manifest.py                            |  608 +-
 pyiceberg/partitioning.py                        |   31 +-
 pyiceberg/schema.py                              |   88 +-
 pyiceberg/table/__init__.py                      |  485 +-
 pyiceberg/table/inspect.py                       |  178 +-
 pyiceberg/table/maintenance.py                   |   45 -
 pyiceberg/table/metadata.py                      |   24 +-
 pyiceberg/table/puffin.py                        |  116 -
 pyiceberg/table/snapshots.py                     |   26 +-
 pyiceberg/table/statistics.py                    |   17 +-
 pyiceberg/table/update/__init__.py               |   57 +-
 pyiceberg/table/update/schema.py                 |  132 +-
 pyiceberg/table/update/snapshot.py               |  302 +-
 pyiceberg/table/update/spec.py                   |   16 +-
 pyiceberg/table/update/validate.py               |  237 -
 pyiceberg/table/upsert_util.py                   |  106 +-
 pyiceberg/transforms.py                          |  243 +-
 pyiceberg/typedef.py                             |   47 +-
 pyiceberg/types.py                               |  147 +-
 pyiceberg/utils/concurrent.py                    |   10 +-
 pyiceberg/utils/config.py                        |    9 +-
 pyiceberg/utils/datetime.py                      |   95 -
 pyiceberg/utils/schema_conversion.py             |   49 +-
 pyproject.toml                                   |   86 +-
 tests/avro/test_file.py                          |  165 +-
 tests/avro/test_reader.py                        |   45 +-
 tests/avro/test_resolver.py                      |    6 +-
 tests/avro/test_writer.py                        |   56 +-
 tests/catalog/test_base.py                       |   23 +-
 tests/catalog/test_dynamodb.py                   |    8 -
 tests/catalog/test_glue.py                       |   22 -
 tests/catalog/test_hive.py                       |  164 +-
tests/catalog/test_rest.py | 272 +- tests/catalog/test_rest_auth.py | 155 - tests/catalog/test_sql.py | 81 +- tests/cli/test_console.py | 20 +- tests/conftest.py | 216 +- tests/expressions/test_evaluator.py | 33 +- tests/expressions/test_expressions.py | 89 +- tests/expressions/test_literals.py | 18 +- tests/expressions/test_parser.py | 22 - tests/expressions/test_residual_evaluator.py | 28 +- tests/expressions/test_visitors.py | 824 +-- tests/integration/test_add_files.py | 108 +- tests/integration/test_catalog.py | 316 - tests/integration/test_deletes.py | 49 +- tests/integration/test_hive_migration.py | 83 - tests/integration/test_inspect_table.py | 413 +- tests/integration/test_partition_evolution.py | 8 - tests/integration/test_partitioning_key.py | 89 +- tests/integration/test_reads.py | 164 +- tests/integration/test_rest_manifest.py | 26 +- tests/integration/test_rest_schema.py | 106 +- .../integration/test_statistics_operations.py | 7 - .../test_optimistic_concurrency.py | 93 - .../test_writes/test_partitioned_writes.py | 25 +- tests/integration/test_writes/test_writes.py | 674 +-- tests/io/test_pyarrow.py | 436 +- tests/io/test_pyarrow_stats.py | 54 +- tests/io/test_pyarrow_visitor.py | 14 +- tests/table/bitmaps/64map32bitvals.bin | Bin 48 -> 0 bytes tests/table/bitmaps/64mapempty.bin | Bin 8 -> 0 bytes tests/table/bitmaps/64maphighvals.bin | Bin 1086 -> 0 bytes tests/table/bitmaps/64mapspreadvals.bin | Bin 408 -> 0 bytes tests/table/test_datafusion.py | 64 - tests/table/test_expire_snapshots.py | 225 - tests/table/test_init.py | 199 +- tests/table/test_locations.py | 10 +- tests/table/test_metadata.py | 115 +- tests/table/test_partitioning.py | 14 +- tests/table/test_puffin.py | 74 - tests/table/test_snapshots.py | 153 +- tests/table/test_statistics.py | 54 - tests/table/test_upsert.py | 422 +- tests/table/test_validate.py | 352 -- tests/test_avro_sanitization.py | 269 - tests/test_conversions.py | 57 - tests/test_schema.py | 26 +- tests/test_transforms.py | 84 +- tests/test_typedef.py | 44 +- tests/test_types.py | 58 +- tests/utils/test_config.py | 8 - tests/utils/test_datetime.py | 65 +- tests/utils/test_manifest.py | 61 +- tests/utils/test_schema_conversion.py | 35 +- vendor/Makefile | 40 - vendor/README.md | 40 +- 148 files changed, 5034 insertions(+), 14614 deletions(-) delete mode 100644 .github/pull_request_template.md create mode 100644 .github/workflows/python-integration.yml delete mode 100644 mkdocs/docs/expression-dsl.md delete mode 100644 mkdocs/docs/row-filter-syntax.md rename pyiceberg/catalog/{rest/__init__.py => rest.py} (80%) delete mode 100644 pyiceberg/catalog/rest/auth.py delete mode 100644 pyiceberg/catalog/rest/response.py delete mode 100644 pyiceberg/table/maintenance.py delete mode 100644 pyiceberg/table/puffin.py delete mode 100644 pyiceberg/table/update/validate.py delete mode 100644 tests/catalog/test_rest_auth.py delete mode 100644 tests/integration/test_catalog.py delete mode 100644 tests/integration/test_hive_migration.py delete mode 100644 tests/integration/test_writes/test_optimistic_concurrency.py delete mode 100644 tests/table/bitmaps/64map32bitvals.bin delete mode 100644 tests/table/bitmaps/64mapempty.bin delete mode 100644 tests/table/bitmaps/64maphighvals.bin delete mode 100644 tests/table/bitmaps/64mapspreadvals.bin delete mode 100644 tests/table/test_datafusion.py delete mode 100644 tests/table/test_expire_snapshots.py delete mode 100644 tests/table/test_puffin.py delete mode 100644 tests/table/test_statistics.py delete mode 100644 
tests/table/test_validate.py delete mode 100644 tests/test_avro_sanitization.py delete mode 100644 vendor/Makefile diff --git a/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml b/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml index 60f9d7c756..cfcabd0a6f 100644 --- a/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml @@ -28,9 +28,7 @@ body: description: What Apache Iceberg version are you using? multiple: false options: - - "0.9.1 (latest release)" - - "0.9.0" - - "0.8.1" + - "0.8.1 (latest release)" - "0.8.0" - "0.7.1" - "0.7.0" diff --git a/.github/dependabot.yml b/.github/dependabot.yml index ab54eb410f..b92e7c26b5 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -22,9 +22,9 @@ updates: - package-ecosystem: "pip" directory: "/" schedule: - interval: "weekly" + interval: "daily" open-pull-requests-limit: 50 - package-ecosystem: "github-actions" directory: "/" schedule: - interval: "weekly" + interval: "daily" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md deleted file mode 100644 index afcc22b98a..0000000000 --- a/.github/pull_request_template.md +++ /dev/null @@ -1,14 +0,0 @@ - - - - - -# Rationale for this change - -# Are these changes tested? - -# Are there any user-facing changes? - - diff --git a/.github/workflows/license_check.yml b/.github/workflows/license_check.yml index a501a6807e..9408a3a046 100644 --- a/.github/workflows/license_check.yml +++ b/.github/workflows/license_check.yml @@ -24,5 +24,5 @@ jobs: rat: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v4 - run: dev/check-license diff --git a/.github/workflows/nightly-pypi-build.yml b/.github/workflows/nightly-pypi-build.yml index 9bb0894159..e7a32a3d0a 100644 --- a/.github/workflows/nightly-pypi-build.yml +++ b/.github/workflows/nightly-pypi-build.yml @@ -31,7 +31,7 @@ jobs: outputs: VERSION: ${{ steps.set-version.outputs.VERSION }} steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v4 with: fetch-depth: 1 @@ -71,7 +71,7 @@ jobs: steps: - name: Download all the artifacts - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v4 with: merge-multiple: true path: dist/ diff --git a/.github/workflows/pypi-build-artifacts.yml b/.github/workflows/pypi-build-artifacts.yml index a6e9769bbf..288e5c0046 100644 --- a/.github/workflows/pypi-build-artifacts.yml +++ b/.github/workflows/pypi-build-artifacts.yml @@ -35,7 +35,7 @@ jobs: os: [ ubuntu-22.04, windows-2022, macos-13, macos-14 ] steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v4 with: fetch-depth: 1 @@ -62,7 +62,7 @@ jobs: if: startsWith(matrix.os, 'ubuntu') - name: Build wheels - uses: pypa/cibuildwheel@v3.1.3 + uses: pypa/cibuildwheel@v2.22.0 with: output-dir: wheelhouse config-file: "pyproject.toml" diff --git a/.github/workflows/python-ci-docs.yml b/.github/workflows/python-ci-docs.yml index c9cb7f05af..d6e14c8400 100644 --- a/.github/workflows/python-ci-docs.yml +++ b/.github/workflows/python-ci-docs.yml @@ -35,7 +35,7 @@ jobs: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v4 - name: Install poetry run: make install-poetry - uses: actions/setup-python@v5 diff --git a/.github/workflows/python-ci.yml b/.github/workflows/python-ci.yml index 63559c35d4..772d198e28 100644 --- a/.github/workflows/python-ci.yml +++ b/.github/workflows/python-ci.yml @@ -50,59 +50,17 @@ jobs: python: ['3.9', '3.10', '3.11', '3.12'] steps: - - uses: actions/checkout@v5 + - uses: 
actions/checkout@v4 - name: Install poetry run: make install-poetry - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - - name: Install system dependencies - run: sudo apt-get update && sudo apt-get install -y libkrb5-dev # for kerberos + cache: poetry + cache-dependency-path: ./poetry.lock - name: Install run: make install-dependencies - - name: Run linters + - name: Linters run: make lint - - name: Run unit tests with coverage - run: COVERAGE=1 make test - - name: Generate coverage report (85%) # Coverage threshold should only increase over time — never decrease it! - run: COVERAGE_FAIL_UNDER=85 make coverage-report - - integration-test: - runs-on: ubuntu-22.04 - strategy: - matrix: - python: ['3.9', '3.10', '3.11', '3.12'] - - steps: - - uses: actions/checkout@v5 - - name: Install system dependencies - run: sudo apt-get update && sudo apt-get install -y libkrb5-dev # for kerberos - - name: Install - run: make install - - - name: Run integration tests with coverage - run: COVERAGE=1 make test-integration - - name: Show debug logs - if: ${{ failure() }} - run: docker compose -f dev/docker-compose.yml logs - - - name: Run s3 integration tests with coverage - run: COVERAGE=1 make test-s3 - - name: Show debug logs - if: ${{ failure() }} - run: docker compose -f dev/docker-compose.yml logs - - - name: Run adls integration tests with coverage - run: COVERAGE=1 make test-adls - - name: Show debug logs - if: ${{ failure() }} - run: docker compose -f dev/docker-compose-azurite.yml logs - - - name: Run gcs integration tests with coverage - run: COVERAGE=1 make test-gcs - - name: Show debug logs - if: ${{ failure() }} - run: docker compose -f dev/docker-compose-gcs-server.yml logs - - - name: Generate coverage report (75%) # Coverage threshold should only increase over time — never decrease it! - run: COVERAGE_FAIL_UNDER=75 make coverage-report + - name: Tests + run: make test-coverage diff --git a/.github/workflows/python-integration.yml b/.github/workflows/python-integration.yml new file mode 100644 index 0000000000..8b0a8a97f0 --- /dev/null +++ b/.github/workflows/python-integration.yml @@ -0,0 +1,59 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +name: "Python Integration" + +on: + push: + branches: + - 'main' + pull_request: + paths: + - '**' # Include all files and directories in the repository by default. + - '!.github/workflows/**' # Exclude all workflow files + - '.github/workflows/python-integration.yml' # except the current file. + - '!.github/ISSUE_TEMPLATE/**' # Exclude files and directories that don't impact tests or code like templates, metadata, and documentation. 
+ - '!.gitignore' + - '!.asf.yml' + - '!mkdocs/**' + - '!.gitattributes' + - '!README.md' + - '!CONTRIBUTING.md' + - '!LICENSE' + - '!NOTICE' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ github.event_name == 'pull_request' }} + +jobs: + integration-test: + runs-on: ubuntu-22.04 + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 2 + - name: Install + run: make install + - name: Run integration tests + run: make test-integration + - name: Show debug logs + if: ${{ failure() }} + run: docker compose -f dev/docker-compose.yml logs diff --git a/.github/workflows/python-release-docs.yml b/.github/workflows/python-release-docs.yml index 7a3a163de6..2823563fe5 100644 --- a/.github/workflows/python-release-docs.yml +++ b/.github/workflows/python-release-docs.yml @@ -30,7 +30,7 @@ jobs: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v4 - name: Install poetry run: make install-poetry - uses: actions/setup-python@v5 diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index a4f04b446b..a6175ead9e 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -89,7 +89,7 @@ jobs: needs: - validate-inputs steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v4 with: fetch-depth: 1 diff --git a/.github/workflows/svn-build-artifacts.yml b/.github/workflows/svn-build-artifacts.yml index 463dfcbfec..8336b46940 100644 --- a/.github/workflows/svn-build-artifacts.yml +++ b/.github/workflows/svn-build-artifacts.yml @@ -35,7 +35,7 @@ jobs: os: [ ubuntu-22.04, windows-2022, macos-13, macos-14 ] steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v4 with: fetch-depth: 1 @@ -57,7 +57,7 @@ jobs: if: startsWith(matrix.os, 'ubuntu') - name: Build wheels - uses: pypa/cibuildwheel@v3.1.3 + uses: pypa/cibuildwheel@v2.22.0 with: output-dir: wheelhouse config-file: "pyproject.toml" diff --git a/.gitignore b/.gitignore index 064ce38fcc..7043f0e7d4 100644 --- a/.gitignore +++ b/.gitignore @@ -35,7 +35,6 @@ coverage.xml .project .settings bin/ -.vscode/ # Hive/metastore files metastore_db/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index afc1449321..66f830e2b8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,13 +27,13 @@ repos: - id: check-yaml - id: check-ast - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.13 + rev: v0.8.6 hooks: - id: ruff args: [ --fix, --exit-non-zero-on-fix ] - id: ruff-format - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.16.0 + rev: v1.14.1 hooks: - id: mypy args: diff --git a/Makefile b/Makefile index 6bc55e94aa..95266b6d25 100644 --- a/Makefile +++ b/Makefile @@ -14,143 +14,102 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# ======================== -# Configuration Variables -# ======================== -PYTEST_ARGS ?= -v # Override with e.g. 
PYTEST_ARGS="-vv --tb=short" -COVERAGE ?= 0 # Set COVERAGE=1 to enable coverage: make test COVERAGE=1 -COVERAGE_FAIL_UNDER ?= 85 # Minimum coverage % to pass: make coverage-report COVERAGE_FAIL_UNDER=70 -ifeq ($(COVERAGE),1) - TEST_RUNNER = poetry run coverage run --parallel-mode --source=pyiceberg -m -else - TEST_RUNNER = poetry run -endif +help: ## Display this help + @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-20s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) -POETRY_VERSION = 2.1.4 - -# ============ -# Help Section -# ============ - -##@ General - -help: ## Display this help message - @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-25s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) - -# ================== -# Installation Tasks -# ================== - -##@ Setup - -install-poetry: ## Ensure Poetry is installed at the specified version +POETRY_VERSION = 2.0.1 +install-poetry: ## Ensure Poetry is installed and the correct version is being used. @if ! command -v poetry &> /dev/null; then \ - echo "Poetry not found. Installing..."; \ + echo "Poetry could not be found. Installing..."; \ pip install --user poetry==$(POETRY_VERSION); \ else \ INSTALLED_VERSION=$$(pip show poetry | grep Version | awk '{print $$2}'); \ if [ "$$INSTALLED_VERSION" != "$(POETRY_VERSION)" ]; then \ - echo "Updating Poetry to version $(POETRY_VERSION)..."; \ + echo "Poetry version $$INSTALLED_VERSION does not match required version $(POETRY_VERSION). Updating..."; \ pip install --user --upgrade poetry==$(POETRY_VERSION); \ else \ - echo "Poetry version $(POETRY_VERSION) already installed."; \ - fi; \ + echo "Poetry version $$INSTALLED_VERSION is already installed."; \ + fi \ fi -install-dependencies: ## Install all dependencies including extras +install-dependencies: ## Install dependencies including dev, docs, and all extras poetry install --all-extras -install: install-poetry install-dependencies ## Install Poetry and dependencies - -# =============== -# Code Validation -# =============== - -##@ Quality +install: | install-poetry install-dependencies check-license: ## Check license headers ./dev/check-license -lint: ## Run code linters via pre-commit +lint: ## lint poetry run pre-commit run --all-files -# =============== -# Testing Section -# =============== - -##@ Testing - -test: ## Run all unit tests (excluding integration) - $(TEST_RUNNER) pytest tests/ -m "(unmarked or parametrize) and not integration" $(PYTEST_ARGS) +test: ## Run all unit tests, can add arguments with PYTEST_ARGS="-vv" + poetry run pytest tests/ -m "(unmarked or parametrize) and not integration" ${PYTEST_ARGS} -test-integration: test-integration-setup test-integration-exec ## Run integration tests +test-s3: # Run tests marked with s3, can add arguments with PYTEST_ARGS="-vv" + sh ./dev/run-minio.sh + poetry run pytest tests/ -m s3 ${PYTEST_ARGS} -test-integration-setup: ## Start Docker services for integration tests +test-integration: ## Run all integration tests, can add arguments with PYTEST_ARGS="-vv" docker compose -f dev/docker-compose-integration.yml kill docker compose -f dev/docker-compose-integration.yml rm -f docker compose -f dev/docker-compose-integration.yml up -d sleep 10 docker compose -f dev/docker-compose-integration.yml cp ./dev/provision.py spark-iceberg:/opt/spark/provision.py docker 
compose -f dev/docker-compose-integration.yml exec -T spark-iceberg ipython ./provision.py + poetry run pytest tests/ -v -m integration ${PYTEST_ARGS} -test-integration-exec: ## Run integration tests (excluding provision) - $(TEST_RUNNER) pytest tests/ -m integration $(PYTEST_ARGS) - -test-integration-rebuild: ## Rebuild integration Docker services from scratch +test-integration-rebuild: docker compose -f dev/docker-compose-integration.yml kill docker compose -f dev/docker-compose-integration.yml rm -f docker compose -f dev/docker-compose-integration.yml build --no-cache -test-s3: ## Run tests marked with @pytest.mark.s3 - sh ./dev/run-minio.sh - $(TEST_RUNNER) pytest tests/ -m s3 $(PYTEST_ARGS) - -test-adls: ## Run tests marked with @pytest.mark.adls +test-adls: ## Run tests marked with adls, can add arguments with PYTEST_ARGS="-vv" sh ./dev/run-azurite.sh - $(TEST_RUNNER) pytest tests/ -m adls $(PYTEST_ARGS) + poetry run pytest tests/ -m adls ${PYTEST_ARGS} -test-gcs: ## Run tests marked with @pytest.mark.gcs +test-gcs: ## Run tests marked with gcs, can add arguments with PYTEST_ARGS="-vv" sh ./dev/run-gcs-server.sh - $(TEST_RUNNER) pytest tests/ -m gcs $(PYTEST_ARGS) + poetry run pytest tests/ -m gcs ${PYTEST_ARGS} + +test-coverage-unit: # Run test with coverage for unit tests, can add arguments with PYTEST_ARGS="-vv" + poetry run coverage run --source=pyiceberg/ --data-file=.coverage.unit -m pytest tests/ -v -m "(unmarked or parametrize) and not integration" ${PYTEST_ARGS} -test-coverage: COVERAGE=1 -test-coverage: test test-integration test-s3 test-adls test-gcs coverage-report ## Run all tests with coverage and report +test-coverage-integration: # Run test with coverage for integration tests, can add arguments with PYTEST_ARGS="-vv" + docker compose -f dev/docker-compose-integration.yml kill + docker compose -f dev/docker-compose-integration.yml rm -f + docker compose -f dev/docker-compose-integration.yml up -d + sh ./dev/run-azurite.sh + sh ./dev/run-gcs-server.sh + sleep 10 + docker compose -f dev/docker-compose-integration.yml cp ./dev/provision.py spark-iceberg:/opt/spark/provision.py + docker compose -f dev/docker-compose-integration.yml exec -T spark-iceberg ipython ./provision.py + poetry run coverage run --source=pyiceberg/ --data-file=.coverage.integration -m pytest tests/ -v -m integration ${PYTEST_ARGS} -coverage-report: ## Combine and report coverage - poetry run coverage combine - poetry run coverage report -m --fail-under=$(COVERAGE_FAIL_UNDER) +test-coverage: | test-coverage-unit test-coverage-integration ## Run all tests with coverage including unit and integration tests + poetry run coverage combine .coverage.unit .coverage.integration + poetry run coverage report -m --fail-under=90 poetry run coverage html poetry run coverage xml -# ================ -# Documentation -# ================ - -##@ Documentation -docs-install: ## Install docs dependencies - poetry install --with docs - -docs-serve: ## Serve local docs preview (hot reload) - poetry run mkdocs serve -f mkdocs/mkdocs.yml - -docs-build: ## Build the static documentation site - poetry run mkdocs build -f mkdocs/mkdocs.yml --strict - -# =================== -# Project Maintenance -# =================== - -##@ Maintenance - -clean: ## Remove build artifacts and caches - @echo "Cleaning up Cython and Python cached files..." +clean: ## Clean up the project Python working environment + @echo "Cleaning up Cython and Python cached files" @rm -rf build dist *.egg-info @find . 
-name "*.so" -exec echo Deleting {} \; -delete @find . -name "*.pyc" -exec echo Deleting {} \; -delete @find . -name "__pycache__" -exec echo Deleting {} \; -exec rm -rf {} + @find . -name "*.pyd" -exec echo Deleting {} \; -delete @find . -name "*.pyo" -exec echo Deleting {} \; -delete - @echo "Cleanup complete." + @echo "Cleanup complete" + +docs-install: + poetry install --with docs + +docs-serve: + poetry run mkdocs serve -f mkdocs/mkdocs.yml + +docs-build: + poetry run mkdocs build -f mkdocs/mkdocs.yml --strict diff --git a/dev/.rat-excludes b/dev/.rat-excludes index 084c53d2c4..e93e25b69e 100644 --- a/dev/.rat-excludes +++ b/dev/.rat-excludes @@ -1,4 +1,3 @@ -.github/* .rat-excludes build .git diff --git a/dev/Dockerfile b/dev/Dockerfile index 5292e26421..e9cf952556 100644 --- a/dev/Dockerfile +++ b/dev/Dockerfile @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM python:3.12-bullseye +FROM python:3.9-bullseye RUN apt-get -qq update && \ apt-get -qq install -y --no-install-recommends \ @@ -37,10 +37,10 @@ RUN mkdir -p ${HADOOP_HOME} && mkdir -p ${SPARK_HOME} && mkdir -p /home/iceberg/ WORKDIR ${SPARK_HOME} # Remember to also update `tests/conftest`'s spark setting -ENV SPARK_VERSION=3.5.6 +ENV SPARK_VERSION=3.5.4 ENV ICEBERG_SPARK_RUNTIME_VERSION=3.5_2.12 -ENV ICEBERG_VERSION=1.9.1 -ENV PYICEBERG_VERSION=0.9.1 +ENV ICEBERG_VERSION=1.8.0 +ENV PYICEBERG_VERSION=0.8.1 RUN curl --retry 5 -s -C - https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop3.tgz -o spark-${SPARK_VERSION}-bin-hadoop3.tgz \ && tar xzf spark-${SPARK_VERSION}-bin-hadoop3.tgz --directory /opt/spark --strip-components 1 \ @@ -63,7 +63,7 @@ RUN chmod u+x /opt/spark/sbin/* && \ RUN pip3 install -q ipython -RUN pip3 install "pyiceberg[s3fs,hive,pyarrow]==${PYICEBERG_VERSION}" +RUN pip3 install "pyiceberg[s3fs,hive]==${PYICEBERG_VERSION}" COPY entrypoint.sh . COPY provision.py . diff --git a/dev/docker-compose-azurite.yml b/dev/docker-compose-azurite.yml index 5391b8e2f5..4091ff94b5 100644 --- a/dev/docker-compose-azurite.yml +++ b/dev/docker-compose-azurite.yml @@ -22,4 +22,4 @@ services: hostname: azurite ports: - 10000:10000 - command: ["azurite-blob", "--loose", "--skipApiVersionCheck", "--blobHost", "0.0.0.0"] + command: ["azurite-blob", "--loose", "--blobHost", "0.0.0.0"] diff --git a/dev/docker-compose-integration.yml b/dev/docker-compose-integration.yml index c901b2ee23..cdae1f6695 100644 --- a/dev/docker-compose-integration.yml +++ b/dev/docker-compose-integration.yml @@ -53,7 +53,6 @@ services: - CATALOG_WAREHOUSE=s3://warehouse/ - CATALOG_IO__IMPL=org.apache.iceberg.aws.s3.S3FileIO - CATALOG_S3_ENDPOINT=http://minio:9000 - - CATALOG_JDBC_STRICT__MODE=true minio: image: minio/minio container_name: pyiceberg-minio @@ -82,7 +81,7 @@ services: - AWS_REGION=us-east-1 entrypoint: > /bin/sh -c " - until (/usr/bin/mc alias set minio http://minio:9000 admin password) do echo '...waiting...' && sleep 1; done; + until (/usr/bin/mc config host add minio http://minio:9000 admin password) do echo '...waiting...' 
&& sleep 1; done; /usr/bin/mc mb minio/warehouse; /usr/bin/mc policy set public minio/warehouse; tail -f /dev/null diff --git a/dev/docker-compose.yml b/dev/docker-compose.yml index 609ac8d51f..5c2c800e5c 100644 --- a/dev/docker-compose.yml +++ b/dev/docker-compose.yml @@ -38,7 +38,7 @@ services: - AWS_REGION=us-east-1 entrypoint: > /bin/sh -c " - until (/usr/bin/mc alias set minio http://minio:9000 admin password) do echo '...waiting...' && sleep 1; done; + until (/usr/bin/mc config host add minio http://minio:9000 admin password) do echo '...waiting...' && sleep 1; done; /usr/bin/mc rm -r --force minio/warehouse; /usr/bin/mc mb minio/warehouse; /usr/bin/mc policy set public minio/warehouse; diff --git a/dev/hive/core-site.xml b/dev/hive/core-site.xml index f5a9473b51..b77332b83b 100644 --- a/dev/hive/core-site.xml +++ b/dev/hive/core-site.xml @@ -50,9 +50,4 @@ fs.s3a.path.style.access true - - hive.metastore.disallow.incompatible.col.type.changes - false - - diff --git a/dev/provision.py b/dev/provision.py index 231f5123ce..b358da6593 100644 --- a/dev/provision.py +++ b/dev/provision.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -import math from pyspark.sql import SparkSession from pyspark.sql.functions import current_date, date_add, expr @@ -114,99 +113,89 @@ """ ) - # Merge on read has been implemented in version ≥2: - # v2: Using positional deletes - # v3: Using deletion vectors - - for format_version in [2, 3]: - identifier = f'{catalog_name}.default.test_positional_mor_deletes_v{format_version}' - spark.sql( - f""" - CREATE OR REPLACE TABLE {identifier} ( - dt date, - number integer, - letter string - ) - USING iceberg - TBLPROPERTIES ( - 'write.delete.mode'='merge-on-read', - 'write.update.mode'='merge-on-read', - 'write.merge.mode'='merge-on-read', - 'format-version'='{format_version}' - ); - """ - ) + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_positional_mor_deletes ( + dt date, + number integer, + letter string + ) + USING iceberg + TBLPROPERTIES ( + 'write.delete.mode'='merge-on-read', + 'write.update.mode'='merge-on-read', + 'write.merge.mode'='merge-on-read', + 'format-version'='2' + ); + """ + ) - spark.sql( - f""" - INSERT INTO {identifier} - VALUES - (CAST('2023-03-01' AS date), 1, 'a'), - (CAST('2023-03-02' AS date), 2, 'b'), - (CAST('2023-03-03' AS date), 3, 'c'), - (CAST('2023-03-04' AS date), 4, 'd'), - (CAST('2023-03-05' AS date), 5, 'e'), - (CAST('2023-03-06' AS date), 6, 'f'), - (CAST('2023-03-07' AS date), 7, 'g'), - (CAST('2023-03-08' AS date), 8, 'h'), - (CAST('2023-03-09' AS date), 9, 'i'), - (CAST('2023-03-10' AS date), 10, 'j'), - (CAST('2023-03-11' AS date), 11, 'k'), - (CAST('2023-03-12' AS date), 12, 'l'); - """ - ) + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_positional_mor_deletes + VALUES + (CAST('2023-03-01' AS date), 1, 'a'), + (CAST('2023-03-02' AS date), 2, 'b'), + (CAST('2023-03-03' AS date), 3, 'c'), + (CAST('2023-03-04' AS date), 4, 'd'), + (CAST('2023-03-05' AS date), 5, 'e'), + (CAST('2023-03-06' AS date), 6, 'f'), + (CAST('2023-03-07' AS date), 7, 'g'), + (CAST('2023-03-08' AS date), 8, 'h'), + (CAST('2023-03-09' AS date), 9, 'i'), + (CAST('2023-03-10' AS date), 10, 'j'), + (CAST('2023-03-11' AS date), 11, 'k'), + (CAST('2023-03-12' AS date), 12, 'l'); + """ + ) - spark.sql(f"ALTER TABLE {identifier} CREATE TAG tag_12") + spark.sql(f"ALTER TABLE 
{catalog_name}.default.test_positional_mor_deletes CREATE TAG tag_12") - spark.sql(f"ALTER TABLE {identifier} CREATE BRANCH without_5") + spark.sql(f"ALTER TABLE {catalog_name}.default.test_positional_mor_deletes CREATE BRANCH without_5") - spark.sql(f"DELETE FROM {identifier}.branch_without_5 WHERE number = 5") + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_deletes.branch_without_5 WHERE number = 5") - spark.sql(f"DELETE FROM {identifier} WHERE number = 9") + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_deletes WHERE number = 9") - identifier = f'{catalog_name}.default.test_positional_mor_double_deletes_v{format_version}' + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_positional_mor_double_deletes ( + dt date, + number integer, + letter string + ) + USING iceberg + TBLPROPERTIES ( + 'write.delete.mode'='merge-on-read', + 'write.update.mode'='merge-on-read', + 'write.merge.mode'='merge-on-read', + 'format-version'='2' + ); + """ + ) - spark.sql( - f""" - CREATE OR REPLACE TABLE {identifier} ( - dt date, - number integer, - letter string - ) - USING iceberg - TBLPROPERTIES ( - 'write.delete.mode'='merge-on-read', - 'write.update.mode'='merge-on-read', - 'write.merge.mode'='merge-on-read', - 'format-version'='2' - ); - """ - ) + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_positional_mor_double_deletes + VALUES + (CAST('2023-03-01' AS date), 1, 'a'), + (CAST('2023-03-02' AS date), 2, 'b'), + (CAST('2023-03-03' AS date), 3, 'c'), + (CAST('2023-03-04' AS date), 4, 'd'), + (CAST('2023-03-05' AS date), 5, 'e'), + (CAST('2023-03-06' AS date), 6, 'f'), + (CAST('2023-03-07' AS date), 7, 'g'), + (CAST('2023-03-08' AS date), 8, 'h'), + (CAST('2023-03-09' AS date), 9, 'i'), + (CAST('2023-03-10' AS date), 10, 'j'), + (CAST('2023-03-11' AS date), 11, 'k'), + (CAST('2023-03-12' AS date), 12, 'l'); + """ + ) - spark.sql( - f""" - INSERT INTO {identifier} - VALUES - (CAST('2023-03-01' AS date), 1, 'a'), - (CAST('2023-03-02' AS date), 2, 'b'), - (CAST('2023-03-03' AS date), 3, 'c'), - (CAST('2023-03-04' AS date), 4, 'd'), - (CAST('2023-03-05' AS date), 5, 'e'), - (CAST('2023-03-06' AS date), 6, 'f'), - (CAST('2023-03-07' AS date), 7, 'g'), - (CAST('2023-03-08' AS date), 8, 'h'), - (CAST('2023-03-09' AS date), 9, 'i'), - (CAST('2023-03-10' AS date), 10, 'j'), - (CAST('2023-03-11' AS date), 11, 'k'), - (CAST('2023-03-12' AS date), 12, 'l'); - """ - ) + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_double_deletes WHERE number = 9") - # Perform two deletes, should produce: - # v2: two positional delete files in v2 - # v3: one deletion vector since they are merged - spark.sql(f"DELETE FROM {identifier} WHERE number = 9") - spark.sql(f"DELETE FROM {identifier} WHERE letter == 'f'") + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_double_deletes WHERE letter == 'f'") all_types_dataframe = ( spark.range(0, 5, 1, 5) @@ -273,9 +262,13 @@ """ ) + # There is an issue with CREATE OR REPLACE + # https://github.com/apache/iceberg/issues/8756 + spark.sql(f"DROP TABLE IF EXISTS {catalog_name}.default.test_table_version") + spark.sql( f""" - CREATE OR REPLACE TABLE {catalog_name}.default.test_table_version ( + CREATE TABLE {catalog_name}.default.test_table_version ( dt date, number integer, letter string @@ -335,7 +328,6 @@ CREATE TABLE {catalog_name}.default.test_table_empty_list_and_map ( col_list array, col_map map, - col_struct struct, col_list_with_struct array> ) USING iceberg @@ -348,8 +340,8 
@@ spark.sql( f""" INSERT INTO {catalog_name}.default.test_table_empty_list_and_map - VALUES (null, null, null, null), - (array(), map(), struct(1), array(struct(1))) + VALUES (null, null, null), + (array(), map(), array(struct(1))) """ ) diff --git a/mkdocs/docs/SUMMARY.md b/mkdocs/docs/SUMMARY.md index d268bcc4b0..c344b2fdd2 100644 --- a/mkdocs/docs/SUMMARY.md +++ b/mkdocs/docs/SUMMARY.md @@ -24,8 +24,6 @@ - [Configuration](configuration.md) - [CLI](cli.md) - [API](api.md) - - [Row Filter Syntax](row-filter-syntax.md) - - [Expression DSL](expression-dsl.md) - [Contributing](contributing.md) - [Community](community.md) - Releases diff --git a/mkdocs/docs/api.md b/mkdocs/docs/api.md index 070a6d08e4..7978fdc9b4 100644 --- a/mkdocs/docs/api.md +++ b/mkdocs/docs/api.md @@ -24,7 +24,7 @@ hide: # Python API -(Py)Iceberg is [catalog](https://iceberg.apache.org/terms/#catalog) centric. Meaning that reading/writing data goes via a catalog. First step is to instantiate a catalog to load a table. Let's use the following configuration in `.pyiceberg.yaml` to define a REST catalog called `prod`: +PyIceberg is based around catalogs to load tables. First step is to instantiate a catalog that loads tables. Let's use the following configuration to define a catalog called `prod`: ```yaml catalog: @@ -33,7 +33,7 @@ catalog: credential: t-1234:secret ``` -Note that multiple catalogs can be defined in the same `.pyiceberg.yaml`, for example, in the case of a Hive and REST catalog: +Note that multiple catalogs can be defined in the same `.pyiceberg.yaml`: ```yaml catalog: @@ -47,11 +47,13 @@ catalog: warehouse: my-warehouse ``` -The different catalogs can be loaded in PyIceberg by their name: `load_catalog(name="hive")` and `load_catalog(name="rest")`. An overview of the configuration options can be found on the [configuration page](https://py.iceberg.apache.org/configuration/). +and loaded in python by calling `load_catalog(name="hive")` and `load_catalog(name="rest")`. This information must be placed inside a file called `.pyiceberg.yaml` located either in the `$HOME` or `%USERPROFILE%` directory (depending on whether the operating system is Unix-based or Windows-based, respectively), in the current working directory, or in the `$PYICEBERG_HOME` directory (if the corresponding environment variable is set). -It is also possible to load a catalog without using a `.pyiceberg.yaml` by passing in the properties directly: +For more details on possible configurations refer to the [specific page](https://py.iceberg.apache.org/configuration/). 
+ +Then load the `prod` catalog: ```python from pyiceberg.catalog import load_catalog @@ -68,13 +70,13 @@ catalog = load_catalog( ) ``` -Next, create a namespace: +Let's create a namespace: ```python catalog.create_namespace("docs_example") ``` -Or, list existing namespaces: +And then list them: ```python ns = catalog.list_namespaces() @@ -82,6 +84,12 @@ ns = catalog.list_namespaces() assert ns == [("docs_example",)] ``` +And then list tables in the namespace: + +```python +catalog.list_tables("docs_example") +``` + ## Create a table To create a table from a catalog: @@ -115,21 +123,24 @@ schema = Schema( ) from pyiceberg.partitioning import PartitionSpec, PartitionField +from pyiceberg.transforms import DayTransform partition_spec = PartitionSpec( PartitionField( - source_id=1, field_id=1000, transform="day", name="datetime_day" + source_id=1, field_id=1000, transform=DayTransform(), name="datetime_day" ) ) from pyiceberg.table.sorting import SortOrder, SortField +from pyiceberg.transforms import IdentityTransform # Sort on the symbol -sort_order = SortOrder(SortField(source_id=2, transform='identity')) +sort_order = SortOrder(SortField(source_id=2, transform=IdentityTransform())) catalog.create_table( identifier="docs_example.bids", schema=schema, + location="s3://pyiceberg", partition_spec=partition_spec, sort_order=sort_order, ) @@ -142,11 +153,13 @@ To create a table using a pyarrow schema: ```python import pyarrow as pa -schema = pa.schema([ +schema = pa.schema( + [ pa.field("foo", pa.string(), nullable=True), pa.field("bar", pa.int32(), nullable=False), pa.field("baz", pa.bool_(), nullable=True), -]) + ] +) catalog.create_table( identifier="docs_example.bids", @@ -154,12 +167,18 @@ catalog.create_table( ) ``` -Another API to create a table is using the `create_table_transaction`. This follows the same APIs when making updates to a table. This is a friendly API for both setting the partition specification and sort-order, because you don't have to deal with field-IDs. +To create a table with some subsequent changes atomically in a transaction: ```python -with catalog.create_table_transaction(identifier="docs_example.bids", schema=schema) as txn: +with catalog.create_table_transaction( + identifier="docs_example.bids", + schema=schema, + location="s3://pyiceberg", + partition_spec=partition_spec, + sort_order=sort_order, +) as txn: with txn.update_schema() as update_schema: - update_schema.add_column(path="new_column", field_type='string') + update_schema.add_column(path="new_column", field_type=StringType()) with txn.update_spec() as update_spec: update_spec.add_identity("symbol") @@ -169,8 +188,6 @@ with catalog.create_table_transaction(identifier="docs_example.bids", schema=sch ## Load a table -There are two ways of reading an Iceberg table; through a catalog, and by pointing at the Iceberg metadata directly. Reading through a catalog is preferred, and directly pointing at the metadata is read-only. - ### Catalog table Loading the `bids` table: @@ -186,7 +203,7 @@ This returns a `Table` that represents an Iceberg table that can be queried and ### Static table -To load a table directly from a `metadata.json` file (i.e., **without** using a catalog), you can use a `StaticTable` as follows: +To load a table directly from a metadata file (i.e., **without** using a catalog), you can use a `StaticTable` as follows: ```python from pyiceberg.table import StaticTable @@ -196,15 +213,7 @@ static_table = StaticTable.from_metadata( ) ``` -The static-table does not allow for write operations. 
If your table metadata directory contains a `version-hint.text` file, you can just specify the table root path, and the latest `metadata.json` file will be resolved automatically: - -```python -from pyiceberg.table import StaticTable - -static_table = StaticTable.from_metadata( - "s3://warehouse/wh/nyc.db/taxis" -) -``` +The static-table is considered read-only. ## Check if a table exists @@ -216,9 +225,9 @@ catalog.table_exists("docs_example.bids") Returns `True` if the table already exists. -## Write to a table +## Write support -Reading and writing is being done using [Apache Arrow](https://arrow.apache.org/). Arrow is an in-memory columnar format for fast data interchange and in-memory analytics. Let's consider the following Arrow Table: +With PyIceberg 0.6.0 write support is added through Arrow. Let's consider an Arrow Table: ```python import pyarrow as pa @@ -233,22 +242,31 @@ df = pa.Table.from_pylist( ) ``` -Next, create a table using the Arrow schema: +Next, create a table based on the schema: ```python from pyiceberg.catalog import load_catalog catalog = load_catalog("default") -tbl = catalog.create_table("default.cities", schema=df.schema) +from pyiceberg.schema import Schema +from pyiceberg.types import NestedField, StringType, DoubleType + +schema = Schema( + NestedField(1, "city", StringType(), required=False), + NestedField(2, "lat", DoubleType(), required=False), + NestedField(3, "long", DoubleType(), required=False), +) + +tbl = catalog.create_table("default.cities", schema=schema) ``` -Next, write the data to the table. Both `append` and `overwrite` produce the same result, since the table is empty on creation: +Now write the data to the table: !!! note inline end "Fast append" - PyIceberg defaults to the [fast append](https://iceberg.apache.org/spec/#snapshots) to minimize the amount of data written. This enables fast commit operations, reducing the possibility of conflicts. The downside of the fast append is that it creates more metadata than a merge commit. [Compaction is planned](https://github.com/apache/iceberg-python/issues/270) and will automatically rewrite all the metadata when a threshold is hit, to maintain performant reads. + PyIceberg default to the [fast append](https://iceberg.apache.org/spec/#snapshots) to minimize the amount of data written. This enables quick writes, reducing the possibility of conflicts. The downside of the fast append is that it creates more metadata than a normal commit. [Compaction is planned](https://github.com/apache/iceberg-python/issues/270) and will automatically rewrite all the metadata when a threshold is hit, to maintain performant reads. @@ -260,7 +278,7 @@ tbl.append(df) tbl.overwrite(df) ``` -Now, the data is written to the table, and the table can be read using `tbl.scan().to_arrow()`: +The data is written to the table, and when the table is read using `tbl.scan().to_arrow()`: ```python pyarrow.Table @@ -273,12 +291,14 @@ lat: [[52.371807,37.773972,53.11254,48.864716]] long: [[4.896029,-122.431297,6.0989,2.349014]] ``` -If we want to add more data, we can use `.append()` again: +You both can use `append(df)` or `overwrite(df)` since there is no data yet. 
If we want to add more data, we can use `.append()` again: ```python -tbl.append(pa.Table.from_pylist( +df = pa.Table.from_pylist( [{"city": "Groningen", "lat": 53.21917, "long": 6.56667}], -)) +) + +tbl.append(df) ``` When reading the table `tbl.scan().to_arrow()` you can see that `Groningen` is now also part of the table: @@ -294,30 +314,33 @@ lat: [[52.371807,37.773972,53.11254,48.864716],[53.21917]] long: [[4.896029,-122.431297,6.0989,2.349014],[6.56667]] ``` -The nested lists indicate the different Arrow buffers. Each of the writes produce a [Parquet file](https://parquet.apache.org/) where each [row group](https://parquet.apache.org/docs/concepts/) translates into an Arrow buffer. In the case where the table is large, PyIceberg also allows the option to stream the buffers using the Arrow [RecordBatchReader](https://arrow.apache.org/docs/python/generated/pyarrow.RecordBatchReader.html), avoiding pulling everything into memory right away: +The nested lists indicate the different Arrow buffers, where the first write results into a buffer, and the second append in a separate buffer. This is expected since it will read two parquet files. + +To avoid any type errors during writing, you can enforce the PyArrow table types using the Iceberg table schema: ```python -for buf in tbl.scan().to_arrow_batch_reader(): - print(f"Buffer contains {len(buf)} rows") -``` +from pyiceberg.catalog import load_catalog +import pyarrow as pa -To avoid any type inconsistencies during writing, you can convert the Iceberg table schema to Arrow: +catalog = load_catalog("default") +table = catalog.load_table("default.cities") +schema = table.schema().as_arrow() -```python df = pa.Table.from_pylist( - [{"city": "Groningen", "lat": 53.21917, "long": 6.56667}], schema=table.schema().as_arrow() + [{"city": "Groningen", "lat": 53.21917, "long": 6.56667}], schema=schema ) -tbl.append(df) +table.append(df) ``` -You can delete some of the data from the table by calling `tbl.delete()` with a desired `delete_filter`. This will use the Iceberg metadata to only open up the Parquet files that contain relevant information. +You can delete some of the data from the table by calling `tbl.delete()` with a desired `delete_filter`. ```python tbl.delete(delete_filter="city == 'Paris'") ``` -In the above example, any records where the city field value equals to `Paris` will be deleted. Running `tbl.scan().to_arrow()` will now yield: +In the above example, any records where the city field value equals to `Paris` will be deleted. +Running `tbl.scan().to_arrow()` will now yield: ```python pyarrow.Table @@ -330,11 +353,30 @@ lat: [[52.371807,37.773972,53.11254],[53.21917]] long: [[4.896029,-122.431297,6.0989],[6.56667]] ``` -In the case of `tbl.delete(delete_filter="city == 'Groningen'")`, the whole Parquet file will be dropped without checking it contents, since from the Iceberg metadata PyIceberg can derive that all the content in the file matches the predicate. - ### Partial overwrites -When using the `overwrite` API, you can use an `overwrite_filter` to delete data that matches the filter before appending new data into the table. For example, consider the following Iceberg table: +When using the `overwrite` API, you can use an `overwrite_filter` to delete data that matches the filter before appending new data into the table. 
+ +For example, with an iceberg table created as: + +```python +from pyiceberg.catalog import load_catalog + +catalog = load_catalog("default") + +from pyiceberg.schema import Schema +from pyiceberg.types import NestedField, StringType, DoubleType + +schema = Schema( + NestedField(1, "city", StringType(), required=False), + NestedField(2, "lat", DoubleType(), required=False), + NestedField(3, "long", DoubleType(), required=False), +) + +tbl = catalog.create_table("default.cities", schema=schema) +``` + +And with initial data populating the table: ```python import pyarrow as pa @@ -346,12 +388,6 @@ df = pa.Table.from_pylist( {"city": "Paris", "lat": 48.864716, "long": 2.349014}, ], ) - -from pyiceberg.catalog import load_catalog -catalog = load_catalog("default") - -tbl = catalog.create_table("default.cities", schema=df.schema) - tbl.append(df) ``` @@ -1287,50 +1323,6 @@ with table.manage_snapshots() as ms: ms.create_branch(snapshot_id1, "Branch_A").create_tag(snapshot_id2, "tag789") ``` -## Table Maintenance - -PyIceberg provides table maintenance operations through the `table.maintenance` API. This provides a clean interface for performing maintenance tasks like snapshot expiration. - -### Snapshot Expiration - -Expire old snapshots to clean up table metadata and reduce storage costs: - -```python -# Expire snapshots older than three days -from datetime import datetime, timedelta -table.maintenance.expire_snapshots().older_than( - datetime.now() - timedelta(days=3) -).commit() - -# Expire a specific snapshot by ID -table.maintenance.expire_snapshots().by_id(12345).commit() - -# Context manager usage (recommended for multiple operations) -with table.maintenance.expire_snapshots() as expire: - expire.by_id(12345) - expire.by_id(67890) - # Automatically commits when exiting the context -``` - -#### Real-world Example - -```python -def cleanup_old_snapshots(table_name: str, snapshot_ids: list[int]): - """Remove specific snapshots from a table.""" - catalog = load_catalog("production") - table = catalog.load_table(table_name) - - # Use context manager for safe transaction handling - with table.maintenance.expire_snapshots() as expire: - for snapshot_id in snapshot_ids: - expire.by_id(snapshot_id) - - print(f"Expired {len(snapshot_ids)} snapshots from {table_name}") - -# Usage -cleanup_old_snapshots("analytics.user_events", [12345, 67890, 11111]) -``` - ## Views PyIceberg supports view operations. @@ -1567,55 +1559,9 @@ print(ray_dataset.take(2)) ] ``` -### Bodo - -PyIceberg interfaces closely with Bodo Dataframes (see [Bodo Iceberg Quick Start](https://docs.bodo.ai/latest/quick_start/quickstart_local_iceberg/)), -which provides a drop-in replacement for Pandas that applies query, compiler and HPC optimizations automatically. -Bodo accelerates and scales Python code from single laptops to large clusters without code rewrites. - - - -!!! note "Requirements" - This requires [`bodo` to be installed](index.md). 
- -```python -pip install pyiceberg['bodo'] -``` - - -A table can be read easily into a Bodo Dataframe to perform Pandas operations: - -```python -df = table.to_bodo() # equivalent to `bodo.pandas.read_iceberg_table(table)` -df = df[df["trip_distance"] >= 10.0] -df = df[["VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime"]] -print(df) -``` - -This creates a lazy query, optimizes it, and runs it on all available cores (print triggers execution): - -```python - VendorID tpep_pickup_datetime tpep_dropoff_datetime -0 2 2023-01-01 00:27:12 2023-01-01 00:49:56 -1 2 2023-01-01 00:09:29 2023-01-01 00:29:23 -2 1 2023-01-01 00:13:30 2023-01-01 00:44:00 -3 2 2023-01-01 00:41:41 2023-01-01 01:19:32 -4 2 2023-01-01 00:22:39 2023-01-01 01:30:45 -... ... ... ... -245478 2 2023-01-31 22:32:57 2023-01-31 23:01:48 -245479 2 2023-01-31 22:03:26 2023-01-31 22:46:13 -245480 2 2023-01-31 23:25:56 2023-02-01 00:05:42 -245481 2 2023-01-31 23:18:00 2023-01-31 23:46:00 -245482 2 2023-01-31 23:18:00 2023-01-31 23:41:00 - -[245483 rows x 3 columns] -``` - -Bodo is optimized to take advantage of Iceberg features such as hidden partitioning and various statistics for efficient reads. - ### Daft -PyIceberg interfaces closely with Daft Dataframes (see also: [Daft integration with Iceberg](https://docs.daft.ai/en/stable/io/iceberg/)) which provides a full lazily optimized query engine interface on top of PyIceberg tables. +PyIceberg interfaces closely with Daft Dataframes (see also: [Daft integration with Iceberg](https://www.getdaft.io/projects/docs/en/stable/integrations/iceberg/)) which provides a full lazily optimized query engine interface on top of PyIceberg tables. @@ -1801,66 +1747,3 @@ shape: (11, 4) │ 21 ┆ 566 ┆ Incorrect billing amount ┆ 2022-04-17 10:53:20 │ └───────────┴─────────────┴────────────────────────────┴─────────────────────┘ ``` - -### Apache DataFusion - -PyIceberg integrates with [Apache DataFusion](https://datafusion.apache.org/) through the Custom Table Provider interface ([FFI_TableProvider](https://datafusion.apache.org/python/user-guide/io/table_provider.html)) exposed through `iceberg-rust`. - - - -!!! note "Requirements" - This requires [`datafusion` to be installed](index.md). - - - - - -!!! warning "Experimental Feature" - The DataFusion integration is considered **experimental**. - - The integration has a few caveats: - - - Only works with `datafusion >= 45` - - Depends directly on `iceberg-rust` instead of PyIceberg's implementation - - Has limited features compared to the full PyIceberg API - - The integration will improve as both DataFusion and `iceberg-rust` matures. - - - -PyIceberg tables can be registered directly with DataFusion's SessionContext using the table provider interface. 
- -```python -from datafusion import SessionContext -from pyiceberg.catalog import load_catalog -import pyarrow as pa - -# Load catalog and create/load a table -catalog = load_catalog("catalog", type="in-memory") -catalog.create_namespace_if_not_exists("default") - -# Create some sample data -data = pa.table({"x": [1, 2, 3], "y": [4, 5, 6]}) -iceberg_table = catalog.create_table("default.test", schema=data.schema) -iceberg_table.append(data) - -# Register the table with DataFusion -ctx = SessionContext() -ctx.register_table_provider("test", iceberg_table) - -# Query the table using DataFusion SQL -ctx.table("test").show() -``` - -This will output: - -```python -DataFrame() -+---+---+ -| x | y | -+---+---+ -| 1 | 4 | -| 2 | 5 | -| 3 | 6 | -+---+---+ -``` diff --git a/mkdocs/docs/cli.md b/mkdocs/docs/cli.md index 984e0df43d..28e44955d7 100644 --- a/mkdocs/docs/cli.md +++ b/mkdocs/docs/cli.md @@ -219,19 +219,3 @@ Or output in JSON for automation: } } ``` - -You can also add, update or remove properties on tables or namespaces: - -```sh -➜ pyiceberg properties set table nyc.taxis write.metadata.delete-after-commit.enabled true -Set write.metadata.delete-after-commit.enabled=true on nyc.taxis - -➜ pyiceberg properties get table nyc.taxis -write.metadata.delete-after-commit.enabled true - -➜ pyiceberg properties remove table nyc.taxis write.metadata.delete-after-commit.enabled -Property write.metadata.delete-after-commit.enabled removed from nyc.taxis - -➜ pyiceberg properties get table nyc.taxis write.metadata.delete-after-commit.enabled -Could not find property write.metadata.delete-after-commit.enabled on nyc.taxis -``` diff --git a/mkdocs/docs/community.md b/mkdocs/docs/community.md index bbfe8aa4f2..4c542bff8e 100644 --- a/mkdocs/docs/community.md +++ b/mkdocs/docs/community.md @@ -30,7 +30,7 @@ Community discussions happen primarily on the [dev mailing list](https://lists.a ## Iceberg Community Events -The PyIceberg community sync is on the last Tuesday of every month. The calendar event is located on the [Iceberg Dev Events](https://iceberg.apache.org/community#iceberg-community-events) calendar. +The PyIceberg community sync is on the last Tuesday of every month. To join, make sure to subscribe to the [iceberg-python-sync Google group](https://groups.google.com/g/iceberg-python-sync). ## Community Guidelines diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md index 40cfc0b8c9..c7c26c4912 100644 --- a/mkdocs/docs/configuration.md +++ b/mkdocs/docs/configuration.md @@ -64,7 +64,7 @@ Iceberg tables support table properties to configure table behavior. | `write.parquet.dict-size-bytes` | Size in bytes | 2MB | Set the dictionary page size limit per row group | | `write.metadata.previous-versions-max` | Integer | 100 | The max number of previous version metadata files to keep before deleting after commit. | | `write.metadata.delete-after-commit.enabled` | Boolean | False | Whether to automatically delete old *tracked* metadata files after each table commit. It will retain a number of the most recent metadata files, which can be set using property `write.metadata.previous-versions-max`. | -| `write.object-storage.enabled` | Boolean | False | Enables the [`ObjectStoreLocationProvider`](configuration.md#object-store-location-provider) that adds a hash component to file paths. | +| `write.object-storage.enabled` | Boolean | True | Enables the [`ObjectStoreLocationProvider`](configuration.md#object-store-location-provider) that adds a hash component to file paths. 
Note: the default value of `True` differs from Iceberg's Java implementation | | `write.object-storage.partitioned-paths` | Boolean | True | Controls whether [partition values are included in file paths](configuration.md#partition-exclusion) when object storage is enabled | | `write.py-location-provider.impl` | String of form `module.ClassName` | null | Optional, [custom `LocationProvider`](configuration.md#loading-a-custom-location-provider) implementation | | `write.data.path` | String pointing to location | `{metadata.location}/data` | Sets the location under which data is written. | @@ -75,7 +75,7 @@ Iceberg tables support table properties to configure table behavior. | Key | Options | Default | Description | | ------------------------------------ | ------------------- | ------------- | ----------------------------------------------------------- | | `commit.manifest.target-size-bytes` | Size in bytes | 8388608 (8MB) | Target size when merging manifest files | -| `commit.manifest.min-count-to-merge` | Number of manifests | 100 | Minimum number of manifests to accumulate before merging | +| `commit.manifest.min-count-to-merge` | Number of manifests | 100 | Target size when merging manifest files | | `commit.manifest-merge.enabled` | Boolean | False | Controls whether to automatically merge manifests on writes | @@ -95,7 +95,6 @@ Iceberg works with the concept of a FileIO which is a pluggable module for readi - **hdfs**: `PyArrowFileIO` - **abfs**, **abfss**: `FsspecFileIO` - **oss**: `PyArrowFileIO` -- **hf**: `FsspecFileIO` You can also set the FileIO explicitly: @@ -109,24 +108,22 @@ For the FileIO there are several configuration options available: -| Key | Example | Description | -|-----------------------------|----------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| s3.endpoint | | Configure an alternative endpoint of the S3 service for the FileIO to access. This could be used to use S3FileIO with any s3-compatible object storage service that has a different endpoint, or access a private S3 endpoint in a virtual private cloud. | -| s3.access-key-id | admin | Configure the static access key id used to access the FileIO. | -| s3.secret-access-key | password | Configure the static secret access key used to access the FileIO. | -| s3.session-token | AQoDYXdzEJr... | Configure the static session token used to access the FileIO. | -| s3.role-session-name | session | An optional identifier for the assumed role session. | -| s3.role-arn | arn:aws:... | AWS Role ARN. If provided instead of access_key and secret_key, temporary credentials will be fetched by assuming this role. | -| s3.signer | bearer | Configure the signature version of the FileIO. | -| s3.signer.uri | | Configure the remote signing uri if it differs from the catalog uri. Remote signing is only implemented for `FsspecFileIO`. The final request is sent to `/`. | -| s3.signer.endpoint | v1/main/s3-sign | Configure the remote signing endpoint. Remote signing is only implemented for `FsspecFileIO`. The final request is sent to `/`. (default : v1/aws/s3/sign). | -| s3.region | us-west-2 | Configure the default region used to initialize an `S3FileSystem`. `PyArrowFileIO` attempts to automatically tries to resolve the region if this isn't set (only supported for AWS S3 Buckets). 
| -| s3.resolve-region | False | Only supported for `PyArrowFileIO`, when enabled, it will always try to resolve the location of the bucket (only supported for AWS S3 Buckets). | -| s3.proxy-uri | | Configure the proxy server to be used by the FileIO. | -| s3.connect-timeout | 60.0 | Configure socket connection timeout, in seconds. | -| s3.request-timeout | 60.0 | Configure socket read timeouts on Windows and macOS, in seconds. | -| s3.force-virtual-addressing | False | Whether to use virtual addressing of buckets. If true, then virtual addressing is always enabled. If false, then virtual addressing is only enabled if endpoint_override is empty. This can be used for non-AWS backends that only support virtual hosted-style access. | -| s3.retry-strategy-impl | None | Ability to set a custom S3 retry strategy. A full path to a class needs to be given that extends the [S3RetryStrategy](https://github.com/apache/arrow/blob/639201bfa412db26ce45e73851432018af6c945e/python/pyarrow/_s3fs.pyx#L110) base class. | +| Key | Example | Description | +|----------------------|----------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| s3.endpoint | | Configure an alternative endpoint of the S3 service for the FileIO to access. This could be used to use S3FileIO with any s3-compatible object storage service that has a different endpoint, or access a private S3 endpoint in a virtual private cloud. | +| s3.access-key-id | admin | Configure the static access key id used to access the FileIO. | +| s3.secret-access-key | password | Configure the static secret access key used to access the FileIO. | +| s3.session-token | AQoDYXdzEJr... | Configure the static session token used to access the FileIO. | +| s3.role-session-name | session | An optional identifier for the assumed role session. | +| s3.role-arn | arn:aws:... | AWS Role ARN. If provided instead of access_key and secret_key, temporary credentials will be fetched by assuming this role. | +| s3.signer | bearer | Configure the signature version of the FileIO. | +| s3.signer.uri | | Configure the remote signing uri if it differs from the catalog uri. Remote signing is only implemented for `FsspecFileIO`. The final request is sent to `/`. | +| s3.signer.endpoint | v1/main/s3-sign | Configure the remote signing endpoint. Remote signing is only implemented for `FsspecFileIO`. The final request is sent to `/`. (default : v1/aws/s3/sign). | +| s3.region | us-west-2 | Configure the default region used to initialize an `S3FileSystem`. `PyArrowFileIO` attempts to automatically resolve the region for each S3 bucket, falling back to this value if resolution fails. | +| s3.proxy-uri | | Configure the proxy server to be used by the FileIO. | +| s3.connect-timeout | 60.0 | Configure socket connection timeout, in seconds. | +| s3.request-timeout | 60.0 | Configure socket read timeouts on Windows and macOS, in seconds. | +| s3.force-virtual-addressing | False | Whether to use virtual addressing of buckets. If true, then virtual addressing is always enabled. If false, then virtual addressing is only enabled if endpoint_override is empty. This can be used for non-AWS backends that only support virtual hosted-style access. 
| @@ -147,20 +144,15 @@ For the FileIO there are several configuration options available: -| Key | Example | Description | -|------------------------------|---------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| adls.connection-string | AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqF...;BlobEndpoint= | A [connection string](https://learn.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string). This could be used to use FileIO with any adls-compatible object storage service that has a different endpoint (like [azurite](https://github.com/azure/azurite)). | -| adls.account-name | devstoreaccount1 | The account that you want to connect to | -| adls.account-key | Eby8vdM02xNOcqF... | The key to authentication against the account. | -| adls.sas-token | NuHOuuzdQN7VRM%2FOpOeqBlawRCA845IY05h9eu1Yte4%3D | The shared access signature | -| adls.tenant-id | ad667be4-b811-11ed-afa1-0242ac120002 | The tenant-id | -| adls.client-id | ad667be4-b811-11ed-afa1-0242ac120002 | The client-id | -| adls.client-secret | oCA3R6P\*ka#oa1Sms2J74z... | The client-secret | -| adls.account-host | accountname1.blob.core.windows.net | The storage account host. See [AzureBlobFileSystem](https://github.com/fsspec/adlfs/blob/adb9c53b74a0d420625b86dd00fbe615b43201d2/adlfs/spec.py#L125) for reference | -| adls.blob-storage-authority | .blob.core.windows.net | The hostname[:port] of the Blob Service. Defaults to `.blob.core.windows.net`. Useful for connecting to a local emulator, like [azurite](https://github.com/azure/azurite). See [AzureFileSystem](https://arrow.apache.org/docs/python/filesystems.html#azure-storage-file-system) for reference | -| adls.dfs-storage-authority | .dfs.core.windows.net | The hostname[:port] of the Data Lake Gen 2 Service. Defaults to `.dfs.core.windows.net`. Useful for connecting to a local emulator, like [azurite](https://github.com/azure/azurite). See [AzureFileSystem](https://arrow.apache.org/docs/python/filesystems.html#azure-storage-file-system) for reference | -| adls.blob-storage-scheme | https | Either `http` or `https`. Defaults to `https`. Useful for connecting to a local emulator, like [azurite](https://github.com/azure/azurite). See [AzureFileSystem](https://arrow.apache.org/docs/python/filesystems.html#azure-storage-file-system) for reference | -| adls.dfs-storage-scheme | https | Either `http` or `https`. Defaults to `https`. Useful for connecting to a local emulator, like [azurite](https://github.com/azure/azurite). 
See [AzureFileSystem](https://arrow.apache.org/docs/python/filesystems.html#azure-storage-file-system) for reference | +| Key | Example | Description | +| ---------------------- | ----------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| adls.connection-string | AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqF...;BlobEndpoint= | A [connection string](https://learn.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string). This could be used to use FileIO with any adls-compatible object storage service that has a different endpoint (like [azurite](https://github.com/azure/azurite)). | +| adls.account-name | devstoreaccount1 | The account that you want to connect to | +| adls.account-key | Eby8vdM02xNOcqF... | The key to authentication against the account. | +| adls.sas-token | NuHOuuzdQN7VRM%2FOpOeqBlawRCA845IY05h9eu1Yte4%3D | The shared access signature | +| adls.tenant-id | ad667be4-b811-11ed-afa1-0242ac120002 | The tenant-id | +| adls.client-id | ad667be4-b811-11ed-afa1-0242ac120002 | The client-id | +| adls.client-secret | oCA3R6P\*ka#oa1Sms2J74z... | The client-secret | @@ -196,18 +188,7 @@ PyIceberg uses [S3FileSystem](https://arrow.apache.org/docs/python/generated/pya | s3.access-key-id | admin | Configure the static access key id used to access the FileIO. | | s3.secret-access-key | password | Configure the static secret access key used to access the FileIO. | | s3.session-token | AQoDYXdzEJr... | Configure the static session token used to access the FileIO. | -| s3.force-virtual-addressing | True | Whether to use virtual addressing of buckets. This is set to `True` by default as OSS can only be accessed with virtual hosted style address. | - - - -### Hugging Face - - - -| Key | Example | Description | -| ----------- | ------------------------ | --------------------------------------------------------- | -| hf.endpoint | | Configure the endpoint for Hugging Face | -| hf.token | hf_xxx | The Hugging Face token to access HF Datasets repositories | +| s3.force-virtual-addressing | True | Whether to use virtual addressing of buckets. This must be set to True as OSS can only be accessed with virtual hosted style address. | @@ -231,7 +212,8 @@ Both data file and metadata file locations can be customized by configuring the For more granular control, you can override the `LocationProvider`'s `new_data_location` and `new_metadata_location` methods to define custom logic for generating file paths. See [`Loading a Custom Location Provider`](configuration.md#loading-a-custom-location-provider). -PyIceberg defaults to the [`SimpleLocationProvider`](configuration.md#simple-location-provider) for managing file paths. +PyIceberg defaults to the [`ObjectStoreLocationProvider`](configuration.md#object-store-location-provider), which generates file paths for +data files that are optimized for object storage. 
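As a concrete illustration of the location-related properties above, here is a minimal sketch of creating a table that opts out of the object-storage layout and pins a custom data path. The catalog name (`default`), table identifier (`examples.events`), and bucket path are placeholders rather than values from this repository; property values are passed as strings.

```python
from pyiceberg.catalog import load_catalog
from pyiceberg.schema import Schema
from pyiceberg.types import LongType, NestedField, StringType

# Hypothetical catalog and identifier; adjust to your environment.
catalog = load_catalog("default")

schema = Schema(
    NestedField(field_id=1, name="id", field_type=LongType(), required=True),
    NestedField(field_id=2, name="category", field_type=StringType(), required=False),
)

table = catalog.create_table(
    "examples.events",
    schema=schema,
    properties={
        # Fall back to the SimpleLocationProvider described below.
        "write.object-storage.enabled": "false",
        # Override the default `{metadata.location}/data` location.
        "write.data.path": "s3://bucket/custom/data",
    },
)
```
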
### Simple Location Provider @@ -251,6 +233,9 @@ partitioned over a string column `category` might have a data file with location s3://bucket/ns/table/data/category=orders/0000-0-5affc076-96a4-48f2-9cd2-d5efbc9f0c94-00001.parquet ``` +The `SimpleLocationProvider` is enabled for a table by explicitly setting its `write.object-storage.enabled` table +property to `False`. + ### Object Store Location Provider PyIceberg offers the `ObjectStoreLocationProvider`, and an optional [partition-exclusion](configuration.md#partition-exclusion) @@ -269,8 +254,8 @@ For example, a table partitioned over a string column `category` might have a da s3://bucket/ns/table/data/0101/0110/1001/10110010/category=orders/0000-0-5affc076-96a4-48f2-9cd2-d5efbc9f0c94-00001.parquet ``` -The `ObjectStoreLocationProvider` is enabled for a table by explicitly setting its `write.object-storage.enabled` table -property to `True`. +The `write.object-storage.enabled` table property determines whether the `ObjectStoreLocationProvider` is enabled for a +table. It is used by default. #### Partition Exclusion @@ -339,186 +324,38 @@ catalog: | Key | Example | Description | | ------------------- | -------------------------------- | -------------------------------------------------------------------------------------------------- | -| uri | | URI identifying the REST Server | -| warehouse | myWarehouse | Warehouse location or identifier to request from the catalog service. May be used to determine server-side overrides, such as the warehouse location. | -| snapshot-loading-mode | refs | The snapshots to return in the body of the metadata. Setting the value to `all` would return the full set of snapshots currently valid for the table. Setting the value to `refs` would load all snapshots referenced by branches or tags. | -| `header.X-Iceberg-Access-Delegation` | `vended-credentials` | Signal to the server that the client supports delegated access via a comma-separated list of access mechanisms. The server may choose to supply access via any or none of the requested mechanisms. When using `vended-credentials`, the server provides temporary credentials to the client. When using `remote-signing`, the server signs requests on behalf of the client. (default: `vended-credentials`) | - -#### Headers in REST Catalog - -To configure custom headers in REST Catalog, include them in the catalog properties with `header.`. This -ensures that all HTTP requests to the REST service include the specified headers. - -```yaml -catalog: - default: - uri: http://rest-catalog/ws/ - credential: t-1234:secret - header.content-type: application/vnd.api+json -``` - -#### Authentication Options - -##### Legacy OAuth2 - -Legacy OAuth2 Properties will be removed in PyIceberg 1.0 in place of pluggable AuthManager properties below - -| Key | Example | Description | -| ------------------- | -------------------------------- | -------------------------------------------------------------------------------------------------- | -| oauth2-server-uri | | Authentication URL to use for client credentials authentication (default: uri + 'v1/oauth/tokens') | -| token | FEW23.DFSDF.FSDF | Bearer token value to use for `Authorization` header | -| credential | client_id:client_secret | Credential to use for OAuth2 credential flow when initializing the catalog | +| uri | | URI identifying the REST Server | +| ugi | t-1234:secret | Hadoop UGI for Hive client. 
| +| credential | t-1234:secret | Credential to use for OAuth2 credential flow when initializing the catalog | +| token | FEW23.DFSDF.FSDF | Bearer token value to use for `Authorization` header | | scope | openid offline corpds:ds:profile | Desired scope of the requested security token (default : catalog) | | resource | rest_catalog.iceberg.com | URI for the target resource or service | | audience | rest_catalog | Logical name of target resource or service | - -##### SigV4 - -| Key | Example | Description | -| ------------------- | -------------------------------- | -------------------------------------------------------------------------------------------------- | | rest.sigv4-enabled | true | Sign requests to the REST Server using AWS SigV4 protocol | | rest.signing-region | us-east-1 | The region to use when SigV4 signing a request | | rest.signing-name | execute-api | The service signing name to use when SigV4 signing a request | +| oauth2-server-uri | | Authentication URL to use for client credentials authentication (default: uri + 'v1/oauth/tokens') | -##### Pluggable Authentication via AuthManager - -The RESTCatalog supports pluggable authentication via the `auth` configuration block. This allows you to specify which how the access token will be fetched and managed for use with the HTTP requests to the RESTCatalog server. The authentication method is selected by setting the `auth.type` property, and additional configuration can be provided as needed for each method. - -###### Supported Authentication Types - -- `noop`: No authentication (no Authorization header sent). -- `basic`: HTTP Basic authentication. -- `custom`: Custom authentication manager (requires `auth.impl`). -- `google`: Google Authentication support + -###### Configuration Properties +#### Headers in RESTCatalog -The `auth` block is structured as follows: +To configure custom headers in RESTCatalog, include them in the catalog properties with the prefix `header.`. This +ensures that all HTTP requests to the REST service include the specified headers. ```yaml catalog: default: - type: rest uri: http://rest-catalog/ws/ - auth: - type: - : - # Type-specific configuration - impl: # Only for custom auth -``` - -###### Property Reference - -| Property | Required | Description | -|------------------|----------|-------------------------------------------------------------------------------------------------| -| `auth.type` | Yes | The authentication type to use (`noop`, `basic`, or `custom`). | -| `auth.impl` | Conditionally | The fully qualified class path for a custom AuthManager. Required if `auth.type` is `custom`. | -| `auth.basic` | If type is `basic` | Block containing `username` and `password` for HTTP Basic authentication. | -| `auth.custom` | If type is `custom` | Block containing configuration for the custom AuthManager. | -| `auth.google` | If type is `google` | Block containing `credentials_path` to a service account file (if using). Will default to using Application Default Credentials. | - -###### Examples - -No Authentication: - -```yaml -auth: - type: noop -``` - -Basic Authentication: - -```yaml -auth: - type: basic - basic: - username: myuser - password: mypass -``` - -Custom Authentication: - -```yaml -auth: - type: custom - impl: mypackage.module.MyAuthManager - custom: - property1: value1 - property2: value2 -``` - -###### Notes - -- If `auth.type` is `custom`, you **must** specify `auth.impl` with the full class path to your custom AuthManager. 
-- If `auth.type` is not `custom`, specifying `auth.impl` is not allowed. -- The configuration block under each type (e.g., `basic`, `custom`) is passed as keyword arguments to the corresponding AuthManager. - - - -#### Common Integrations & Examples - -##### AWS Glue - -```yaml -catalog: - s3_tables_catalog: - type: rest - uri: https://glue..amazonaws.com/iceberg - warehouse: :s3tablescatalog/ - rest.sigv4-enabled: true - rest.signing-name: glue - rest.signing-region: -``` - -##### Unity Catalog - -```yaml -catalog: - unity_catalog: - type: rest - uri: https:///api/2.1/unity-catalog/iceberg-rest - warehouse: - token: -``` - -##### R2 Data Catalog - -```yaml -catalog: - r2_catalog: - type: rest - uri: - warehouse: - token: -``` - -##### Lakekeeper - -```yaml -catalog: - lakekeeper_catalog: - type: rest - uri: - warehouse: - credential: : - oauth2-server-uri: http://localhost:30080/realms//protocol/openid-connect/token - scope: lakekeeper + credential: t-1234:secret + header.content-type: application/vnd.api+json ``` -##### Apache Polaris +Specific headers defined by the RESTCatalog spec include: -```yaml -catalog: - polaris_catalog: - type: rest - uri: https://.snowflakecomputing.com/polaris/api/catalog - warehouse: - credential: : - header.X-Iceberg-Access-Delegation: vended-credentials - scope: PRINCIPAL_ROLE:ALL - token-refresh-enabled: true - py-io-impl: pyiceberg.io.fsspec.FsspecFileIO -``` +| Key | Options | Default | Description | +| ------------------------------------ | ------------------------------------- | -------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `header.X-Iceberg-Access-Delegation` | `{vended-credentials,remote-signing}` | `vended-credentials` | Signal to the server that the client supports delegated access via a comma-separated list of access mechanisms. The server may choose to supply access via any or none of the requested mechanisms | ### SQL Catalog @@ -585,12 +422,10 @@ catalog: s3.secret-access-key: password ``` -| Key | Example | Description | -|------------------------------| ------- | ------------------------------------ | -| hive.hive2-compatible | true | Using Hive 2.x compatibility mode | -| hive.kerberos-authentication | true | Using authentication via Kerberos | -| hive.kerberos-service-name | hive | Kerberos service name (default hive) | -| ugi | t-1234:secret | Hadoop UGI for Hive client. | +| Key | Example | Description | +|------------------------------| ------- | --------------------------------- | +| hive.hive2-compatible | true | Using Hive 2.x compatibility mode | +| hive.kerberos-authentication | true | Using authentication via Kerberos | When using Hive 2.x, make sure to set the compatibility flag: diff --git a/mkdocs/docs/contributing.md b/mkdocs/docs/contributing.md index 454da882c7..abce5700fc 100644 --- a/mkdocs/docs/contributing.md +++ b/mkdocs/docs/contributing.md @@ -37,10 +37,18 @@ The PyIceberg Project is hosted on GitHub at - -# Expression DSL - -The PyIceberg library provides a powerful expression Domain Specific Language (DSL) for building complex row filter expressions. This guide will help you understand how to use the expression DSL effectively. This DSL allows you to build type-safe expressions for use in the `row_filter` scan argument. - -They are composed of terms, predicates, and logical operators. 
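As a quick orientation before the building blocks below, this is a minimal sketch of passing a DSL expression to a table scan through the `row_filter` argument. The catalog name, table identifier, and field names are placeholders.

```python
from pyiceberg.catalog import load_catalog
from pyiceberg.expressions import And, EqualTo, GreaterThanOrEqual

# Hypothetical catalog and table; any existing Iceberg table works.
catalog = load_catalog("default")
table = catalog.load_table("examples.users")

# status = 'active' AND age >= 18, expressed with the DSL rather than a filter string.
row_filter = And(
    EqualTo("status", "active"),
    GreaterThanOrEqual("age", 18),
)

result = table.scan(row_filter=row_filter).to_arrow()
```
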
- -## Basic Concepts - -### Terms - -Terms are the basic building blocks of expressions. They represent references to fields in your data: - -```python -from pyiceberg.expressions import Reference - -# Create a reference to a field named "age" -age_field = Reference("age") -``` - -### Predicates - -Predicates are expressions that evaluate to a boolean value. They can be combined using logical operators. - -#### Literal Predicates - -```python -from pyiceberg.expressions import EqualTo, NotEqualTo, LessThan, LessThanOrEqual, GreaterThan, GreaterThanOrEqual - -# age equals 18 -age_equals_18 = EqualTo("age", 18) - -# age is not equal to 18 -age_not_equals_18 = NotEqualTo("age", 18) - -# age is less than 18 -age_less_than_18 = LessThan("age", 18) - -# Less than or equal to -age_less_than_or_equal_18 = LessThanOrEqual("age", 18) - -# Greater than -age_greater_than_18 = GreaterThan("age", 18) - -# Greater than or equal to -age_greater_than_or_equal_18 = GreaterThanOrEqual("age", 18) -``` - -#### Set Predicates - -```python -from pyiceberg.expressions import In, NotIn - -# age is one of 18, 19, 20 -age_in_set = In("age", [18, 19, 20]) - -# age is not 18, 19, oer 20 -age_not_in_set = NotIn("age", [18, 19, 20]) -``` - -#### Unary Predicates - -```python -from pyiceberg.expressions import IsNull, NotNull - -# Is null -name_is_null = IsNull("name") - -# Is not null -name_is_not_null = NotNull("name") -``` - -#### String Predicates - -```python -from pyiceberg.expressions import StartsWith, NotStartsWith - -# TRUE for 'Johnathan', FALSE for 'Johan' -name_starts_with = StartsWith("name", "John") - -# FALSE for 'Johnathan', TRUE for 'Johan' -name_not_starts_with = NotStartsWith("name", "John") -``` - -### Logical Operators - -You can combine predicates using logical operators: - -```python -from pyiceberg.expressions import And, Or, Not - -# TRUE for 25, FALSE for 67 and 15 -age_between = And( - GreaterThanOrEqual("age", 18), - LessThanOrEqual("age", 65) -) - -# FALSE for 25, TRUE for 67 and 15 -age_outside = Or( - LessThan("age", 18), - GreaterThan("age", 65) -) - -# NOT operator -not_adult = Not(GreaterThanOrEqual("age", 18)) -``` - -## Advanced Usage - -### Complex Expressions - -You can build complex expressions by combining multiple predicates and operators: - -```python -from pyiceberg.expressions import And, Or, Not, EqualTo, GreaterThan, LessThan, In - -# (age >= 18 AND age <= 65) AND (status = 'active' OR status = 'pending') -complex_filter = And( - And( - GreaterThanOrEqual("age", 18), - LessThanOrEqual("age", 65) - ), - Or( - EqualTo("status", "active"), - EqualTo("status", "pending") - ) -) - -# NOT (age < 18 OR age > 65) -age_in_range = Not( - Or( - LessThan("age", 18), - GreaterThan("age", 65) - ) -) -``` - -## Best Practices - -1. **Use Type Hints**: Always use type hints when working with expressions to catch type-related errors early. - -2. **Break Down Complex Expressions**: For complex expressions, break them down into smaller, more manageable parts: - -```python -# Instead of this: -complex_filter = And( - And( - GreaterThanOrEqual("age", 18), - LessThanOrEqual("age", 65) - ), - Or( - EqualTo("status", "active"), - EqualTo("status", "pending") - ) -) - -# Do this: -age_range = And( - GreaterThanOrEqual("age", 18), - LessThanOrEqual("age", 65) -) - -status_filter = Or( - EqualTo("status", "active"), - EqualTo("status", "pending") -) - -complex_filter = And(age_range, status_filter) -``` - -## Common Pitfalls - -1. 
**Null Handling**: Be careful when using `IsNull` and `NotNull` predicates with required fields. The expression DSL will automatically optimize these cases: - - `IsNull` (and `IsNaN` for doubles/floats) on a required field will always return `False` - - `NotNull` (and `NotNaN` for doubles/floats) on a required field will always return `True` - -2. **String Comparisons**: When using string predicates like `StartsWith`, ensure that the field type is a string type. - -## Examples - -Here are some practical examples of using the expression DSL: - -### Basic Filtering - -```python -from datetime import datetime -from pyiceberg.expressions import ( - And, - EqualTo, - GreaterThanOrEqual, - LessThanOrEqual, - GreaterThan, - In -) - -active_adult_users_filter = And( - EqualTo("status", "active"), - GreaterThanOrEqual("age", 18) -) - - -high_value_customers = And( - GreaterThan("total_spent", 1000), - In("membership_level", ["gold", "platinum"]) -) - -date_range_filter = And( - GreaterThanOrEqual("created_at", datetime(2024, 1, 1)), - LessThanOrEqual("created_at", datetime(2024, 12, 31)) -) -``` - -### Multi-Condition Filter - -```python -from pyiceberg.expressions import And, Or, Not, EqualTo, GreaterThan - -complex_filter = And( - Not(EqualTo("status", "deleted")), - Or( - And( - EqualTo("type", "premium"), - GreaterThan("subscription_months", 12) - ), - EqualTo("type", "enterprise") - ) -) -``` diff --git a/mkdocs/docs/how-to-release.md b/mkdocs/docs/how-to-release.md index 1747c08348..4af3a212fe 100644 --- a/mkdocs/docs/how-to-release.md +++ b/mkdocs/docs/how-to-release.md @@ -379,8 +379,6 @@ Then, select the previous release version as the **Previous tag** to use the dif **Set as the latest release** and **Publish**. -Make sure to check the `changelog` label on GitHub to see if anything needs to be highlighted. - ### Release the docs Run the [`Release Docs` Github Action](https://github.com/apache/iceberg-python/actions/workflows/python-release-docs.yml). diff --git a/mkdocs/docs/index.md b/mkdocs/docs/index.md index 5f4ed6cc31..2becd10c81 100644 --- a/mkdocs/docs/index.md +++ b/mkdocs/docs/index.md @@ -52,7 +52,6 @@ You can mix and match optional dependencies depending on your needs: | pandas | Installs both PyArrow and Pandas | | duckdb | Installs both PyArrow and DuckDB | | ray | Installs PyArrow, Pandas, and Ray | -| bodo | Installs Bodo | | daft | Installs Daft | | polars | Installs Polars | | s3fs | S3FS as a FileIO implementation to interact with the object store | @@ -65,7 +64,7 @@ You either need to install `s3fs`, `adlfs`, `gcsfs`, or `pyarrow` to be able to ## Connecting to a catalog -Iceberg leverages the [catalog to have one centralized place to organize the tables](https://iceberg.apache.org/terms/#catalog). This can be a traditional Hive catalog to store your Iceberg tables next to the rest, a vendor solution like the AWS Glue catalog, or an implementation of Icebergs' own [REST protocol](https://github.com/apache/iceberg/tree/main/open-api). Checkout the [configuration](configuration.md) page to find all the configuration details. +Iceberg leverages the [catalog to have one centralized place to organize the tables](https://iceberg.apache.org/concepts/catalog/). This can be a traditional Hive catalog to store your Iceberg tables next to the rest, a vendor solution like the AWS Glue catalog, or an implementation of Icebergs' own [REST protocol](https://github.com/apache/iceberg/tree/main/open-api). 
Checkout the [configuration](configuration.md) page to find all the configuration details. For the sake of demonstration, we'll configure the catalog to use the `SqlCatalog` implementation, which will store information in a local `sqlite` database. We'll also configure the catalog to store data files in the local filesystem instead of an object store. This should not be used in production due to the limited scalability. diff --git a/mkdocs/docs/row-filter-syntax.md b/mkdocs/docs/row-filter-syntax.md deleted file mode 100644 index 2191b9fd4a..0000000000 --- a/mkdocs/docs/row-filter-syntax.md +++ /dev/null @@ -1,175 +0,0 @@ - - -# Row Filter Syntax - -In addition to the primary [Expression DSL](expression-dsl.md), PyIceberg provides a string-based statement interface for filtering rows in Iceberg tables. This guide explains the syntax and provides examples for supported operations. - -The row filter syntax is designed to be similar to SQL WHERE clauses. Here are the basic components: - -## Column References - -Columns can be referenced using either unquoted or quoted identifiers: - -```sql -column_name -"column.name" -``` - -## Literals - -The following literal types are supported: - -- Strings: `'hello world'` -- Numbers: `42`, `-42`, `3.14` -- Booleans: `true`, `false` (case insensitive) - -## Comparison Operations - -### Basic Comparisons - -```sql -column = 42 -column != 42 -column > 42 -column >= 42 -column < 42 -column <= 42 -``` - -!!! note - The `==` operator is an alias for `=` and `<>` is an alias for `!=` - -### String Comparisons - -```sql -column = 'hello' -column != 'world' -``` - -## NULL Checks - -Check for NULL values using the `IS NULL` and `IS NOT NULL` operators: - -```sql -column IS NULL -column IS NOT NULL -``` - -## NaN Checks - -For floating-point columns, you can check for NaN values: - -```sql -column IS NAN -column IS NOT NAN -``` - -## IN and NOT IN - -Check if a value is in a set of values: - -```sql -column IN ('a', 'b', 'c') -column NOT IN (1, 2, 3) -``` - -## LIKE Operations - -The LIKE operator supports pattern matching with a wildcard `%` at the end of the string: - -```sql -column LIKE 'prefix%' -column NOT LIKE 'prefix%' -``` - -!!! important - The `%` wildcard is only supported at the end of the pattern. Using it in the middle or beginning of the pattern will raise an error. - -## Logical Operations - -Combine multiple conditions using logical operators: - -```sql -column1 = 42 AND column2 = 'hello' -column1 > 0 OR column2 IS NULL -NOT (column1 = 42) -``` - -!!! tip - Parentheses can be used to group logical operations for clarity: - ```sql - (column1 = 42 AND column2 = 'hello') OR column3 IS NULL - ``` - -## Complete Examples - -Here are some complete examples showing how to combine different operations: - -```sql --- Complex filter with multiple conditions -status = 'active' AND age > 18 AND NOT (country IN ('US', 'CA')) - --- Filter with string pattern matching -name LIKE 'John%' AND age >= 21 - --- Filter with NULL checks and numeric comparisons -price IS NOT NULL AND price > 100 AND quantity > 0 - --- Filter with multiple logical operations -(status = 'pending' OR status = 'processing') AND NOT (priority = 'low') -``` - -## Common Pitfalls - -1. **String Quoting**: Always use single quotes for string literals. Double quotes are reserved for column identifiers. - - ```sql - -- Correct - name = 'John' - - -- Incorrect - name = "John" - ``` - -2. **Wildcard Usage**: The `%` wildcard in LIKE patterns can only appear at the end. 
- - ```sql - -- Correct - name LIKE 'John%' - - -- Incorrect (will raise an error) - name LIKE '%John%' - ``` - -3. **Case Sensitivity**: Boolean literals (`true`/`false`) are case insensitive, but string comparisons are case sensitive. - - ```sql - -- All valid - is_active = true - is_active = TRUE - is_active = True - - -- Case sensitive - status = 'Active' -- Will not match 'active' - ``` - -## Best Practices - -1. For complex use cases, use the primary [Expression DSL](expression-dsl.md) -2. When using multiple conditions, consider the order of operations (NOT > AND > OR) -3. For string comparisons, be consistent with case usage diff --git a/poetry.lock b/poetry.lock index 977a8e5ad5..f0a2f86c92 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "adlfs" @@ -6,8 +6,6 @@ version = "2024.12.0" description = "Access Azure Datalake Gen1 with fsspec and dask" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"adlfs\"" files = [ {file = "adlfs-2024.12.0-py3-none-any.whl", hash = "sha256:00aab061ddec0413b2039487e656b62e01ece8ef1ca0493f76034a596cf069e3"}, {file = "adlfs-2024.12.0.tar.gz", hash = "sha256:04582bf7461a57365766d01a295a0a88b2b8c42c4fea06e2d673f62675cac5c6"}, @@ -27,144 +25,136 @@ tests = ["arrow", "dask[dataframe]", "docker", "pytest", "pytest-mock"] [[package]] name = "aiobotocore" -version = "2.24.0" +version = "2.19.0" description = "Async client for aws services using botocore and aiohttp" optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"s3fs\"" +python-versions = ">=3.8" files = [ - {file = "aiobotocore-2.24.0-py3-none-any.whl", hash = "sha256:72bb1f8eb1b962779a95e1bcc9cf35bc33196ad763b622a40ae7fa9d2e95c87c"}, - {file = "aiobotocore-2.24.0.tar.gz", hash = "sha256:b32c0c45d38c22a18ce395a0b5448606c5260603296a152895b5bdb40ab3139d"}, + {file = "aiobotocore-2.19.0-py3-none-any.whl", hash = "sha256:12c2960a21472b8eb3452cde5eb31d541ca1464d236f4221556320fa8aed2ee8"}, + {file = "aiobotocore-2.19.0.tar.gz", hash = "sha256:552d5756989621b5274f1b4a4840cd76ae83dd930d0b1839af6443743a893faf"}, ] [package.dependencies] aiohttp = ">=3.9.2,<4.0.0" aioitertools = ">=0.5.1,<1.0.0" -botocore = ">=1.39.9,<1.39.12" +botocore = ">=1.36.0,<1.36.4" jmespath = ">=0.7.1,<2.0.0" multidict = ">=6.0.0,<7.0.0" python-dateutil = ">=2.1,<3.0.0" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, +] wrapt = ">=1.10.10,<2.0.0" [package.extras] -awscli = ["awscli (>=1.41.9,<1.41.12)"] -boto3 = ["boto3 (>=1.39.9,<1.39.12)"] -httpx = ["httpx (>=0.25.1,<0.29)"] +awscli = ["awscli (>=1.37.0,<1.37.4)"] +boto3 = ["boto3 (>=1.36.0,<1.36.4)"] [[package]] name = "aiohappyeyeballs" -version = "2.6.1" +version = "2.4.6" description = "Happy Eyeballs for asyncio" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\"" files = [ - {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, - {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, + {file = "aiohappyeyeballs-2.4.6-py3-none-any.whl", hash = 
"sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1"}, + {file = "aiohappyeyeballs-2.4.6.tar.gz", hash = "sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0"}, ] [[package]] name = "aiohttp" -version = "3.12.14" +version = "3.11.12" description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\"" -files = [ - {file = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248"}, - {file = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb"}, - {file = "aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3"}, - {file = "aiohttp-3.12.14-cp310-cp310-win32.whl", hash = "sha256:ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c"}, - {file = "aiohttp-3.12.14-cp310-cp310-win_amd64.whl", hash = "sha256:cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db"}, - {file = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597"}, - {file = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393"}, - {file = "aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28"}, - {file = "aiohttp-3.12.14-cp311-cp311-win32.whl", hash = "sha256:a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b"}, - {file = "aiohttp-3.12.14-cp311-cp311-win_amd64.whl", hash = "sha256:0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced"}, - {file = "aiohttp-3.12.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22"}, - {file = "aiohttp-3.12.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a"}, - {file = "aiohttp-3.12.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7"}, - {file = 
"aiohttp-3.12.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0"}, - {file = "aiohttp-3.12.14-cp312-cp312-win32.whl", hash = "sha256:15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729"}, - {file = "aiohttp-3.12.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338"}, - {file = "aiohttp-3.12.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3143a7893d94dc82bc409f7308bc10d60285a3cd831a68faf1aa0836c5c3c767"}, - {file = "aiohttp-3.12.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3d62ac3d506cef54b355bd34c2a7c230eb693880001dfcda0bf88b38f5d7af7e"}, - {file = "aiohttp-3.12.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48e43e075c6a438937c4de48ec30fa8ad8e6dfef122a038847456bfe7b947b63"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:077b4488411a9724cecc436cbc8c133e0d61e694995b8de51aaf351c7578949d"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d8c35632575653f297dcbc9546305b2c1133391089ab925a6a3706dfa775ccab"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b8ce87963f0035c6834b28f061df90cf525ff7c9b6283a8ac23acee6502afd4"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a2cf66e32a2563bb0766eb24eae7e9a269ac0dc48db0aae90b575dc9583026"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdea089caf6d5cde975084a884c72d901e36ef9c2fd972c9f51efbbc64e96fbd"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7865f27db67d49e81d463da64a59365ebd6b826e0e4847aa111056dcb9dc88"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0ab5b38a6a39781d77713ad930cb5e7feea6f253de656a5f9f281a8f5931b086"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b3b15acee5c17e8848d90a4ebc27853f37077ba6aec4d8cb4dbbea56d156933"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e4c972b0bdaac167c1e53e16a16101b17c6d0ed7eac178e653a07b9f7fad7151"}, - {file = 
"aiohttp-3.12.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7442488b0039257a3bdbc55f7209587911f143fca11df9869578db6c26feeeb8"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f68d3067eecb64c5e9bab4a26aa11bd676f4c70eea9ef6536b0a4e490639add3"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f88d3704c8b3d598a08ad17d06006cb1ca52a1182291f04979e305c8be6c9758"}, - {file = "aiohttp-3.12.14-cp313-cp313-win32.whl", hash = "sha256:a3c99ab19c7bf375c4ae3debd91ca5d394b98b6089a03231d4c580ef3c2ae4c5"}, - {file = "aiohttp-3.12.14-cp313-cp313-win_amd64.whl", hash = "sha256:3f8aad695e12edc9d571f878c62bedc91adf30c760c8632f09663e5f564f4baa"}, - {file = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b8cc6b05e94d837bcd71c6531e2344e1ff0fb87abe4ad78a9261d67ef5d83eae"}, - {file = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1dcb015ac6a3b8facd3677597edd5ff39d11d937456702f0bb2b762e390a21b"}, - {file = "aiohttp-3.12.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3779ed96105cd70ee5e85ca4f457adbce3d9ff33ec3d0ebcdf6c5727f26b21b3"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:717a0680729b4ebd7569c1dcd718c46b09b360745fd8eb12317abc74b14d14d0"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b5dd3a2ef7c7e968dbbac8f5574ebeac4d2b813b247e8cec28174a2ba3627170"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4710f77598c0092239bc12c1fcc278a444e16c7032d91babf5abbf7166463f7b"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f3e9f75ae842a6c22a195d4a127263dbf87cbab729829e0bd7857fb1672400b2"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f9c8d55d6802086edd188e3a7d85a77787e50d56ce3eb4757a3205fa4657922"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79b29053ff3ad307880d94562cca80693c62062a098a5776ea8ef5ef4b28d140"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:23e1332fff36bebd3183db0c7a547a1da9d3b4091509f6d818e098855f2f27d3"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a564188ce831fd110ea76bcc97085dd6c625b427db3f1dbb14ca4baa1447dcbc"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a7a1b4302f70bb3ec40ca86de82def532c97a80db49cac6a6700af0de41af5ee"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:1b07ccef62950a2519f9bfc1e5b294de5dd84329f444ca0b329605ea787a3de5"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:938bd3ca6259e7e48b38d84f753d548bd863e0c222ed6ee6ace3fd6752768a84"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8bc784302b6b9f163b54c4e93d7a6f09563bd01ff2b841b29ed3ac126e5040bf"}, - {file = "aiohttp-3.12.14-cp39-cp39-win32.whl", hash = "sha256:a3416f95961dd7d5393ecff99e3f41dc990fb72eda86c11f2a60308ac6dcd7a0"}, - {file = "aiohttp-3.12.14-cp39-cp39-win_amd64.whl", hash = "sha256:196858b8820d7f60578f8b47e5669b3195c21d8ab261e39b1d705346458f445f"}, - {file = "aiohttp-3.12.14.tar.gz", hash = "sha256:6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2"}, +files = [ + {file = 
"aiohttp-3.11.12-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aa8a8caca81c0a3e765f19c6953416c58e2f4cc1b84829af01dd1c771bb2f91f"}, + {file = "aiohttp-3.11.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:84ede78acde96ca57f6cf8ccb8a13fbaf569f6011b9a52f870c662d4dc8cd854"}, + {file = "aiohttp-3.11.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:584096938a001378484aa4ee54e05dc79c7b9dd933e271c744a97b3b6f644957"}, + {file = "aiohttp-3.11.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:392432a2dde22b86f70dd4a0e9671a349446c93965f261dbaecfaf28813e5c42"}, + {file = "aiohttp-3.11.12-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:88d385b8e7f3a870146bf5ea31786ef7463e99eb59e31db56e2315535d811f55"}, + {file = "aiohttp-3.11.12-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b10a47e5390c4b30a0d58ee12581003be52eedd506862ab7f97da7a66805befb"}, + {file = "aiohttp-3.11.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b5263dcede17b6b0c41ef0c3ccce847d82a7da98709e75cf7efde3e9e3b5cae"}, + {file = "aiohttp-3.11.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50c5c7b8aa5443304c55c262c5693b108c35a3b61ef961f1e782dd52a2f559c7"}, + {file = "aiohttp-3.11.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1c031a7572f62f66f1257db37ddab4cb98bfaf9b9434a3b4840bf3560f5e788"}, + {file = "aiohttp-3.11.12-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:7e44eba534381dd2687be50cbd5f2daded21575242ecfdaf86bbeecbc38dae8e"}, + {file = "aiohttp-3.11.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:145a73850926018ec1681e734cedcf2716d6a8697d90da11284043b745c286d5"}, + {file = "aiohttp-3.11.12-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:2c311e2f63e42c1bf86361d11e2c4a59f25d9e7aabdbdf53dc38b885c5435cdb"}, + {file = "aiohttp-3.11.12-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ea756b5a7bac046d202a9a3889b9a92219f885481d78cd318db85b15cc0b7bcf"}, + {file = "aiohttp-3.11.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:526c900397f3bbc2db9cb360ce9c35134c908961cdd0ac25b1ae6ffcaa2507ff"}, + {file = "aiohttp-3.11.12-cp310-cp310-win32.whl", hash = "sha256:b8d3bb96c147b39c02d3db086899679f31958c5d81c494ef0fc9ef5bb1359b3d"}, + {file = "aiohttp-3.11.12-cp310-cp310-win_amd64.whl", hash = "sha256:7fe3d65279bfbee8de0fb4f8c17fc4e893eed2dba21b2f680e930cc2b09075c5"}, + {file = "aiohttp-3.11.12-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:87a2e00bf17da098d90d4145375f1d985a81605267e7f9377ff94e55c5d769eb"}, + {file = "aiohttp-3.11.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b34508f1cd928ce915ed09682d11307ba4b37d0708d1f28e5774c07a7674cac9"}, + {file = "aiohttp-3.11.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:936d8a4f0f7081327014742cd51d320296b56aa6d324461a13724ab05f4b2933"}, + {file = "aiohttp-3.11.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de1378f72def7dfb5dbd73d86c19eda0ea7b0a6873910cc37d57e80f10d64e1"}, + {file = "aiohttp-3.11.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9d45dbb3aaec05cf01525ee1a7ac72de46a8c425cb75c003acd29f76b1ffe94"}, + {file = "aiohttp-3.11.12-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:930ffa1925393381e1e0a9b82137fa7b34c92a019b521cf9f41263976666a0d6"}, + {file = "aiohttp-3.11.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8340def6737118f5429a5df4e88f440746b791f8f1c4ce4ad8a595f42c980bd5"}, + {file = "aiohttp-3.11.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4016e383f91f2814e48ed61e6bda7d24c4d7f2402c75dd28f7e1027ae44ea204"}, + {file = "aiohttp-3.11.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3c0600bcc1adfaaac321422d615939ef300df81e165f6522ad096b73439c0f58"}, + {file = "aiohttp-3.11.12-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:0450ada317a65383b7cce9576096150fdb97396dcfe559109b403c7242faffef"}, + {file = "aiohttp-3.11.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:850ff6155371fd802a280f8d369d4e15d69434651b844bde566ce97ee2277420"}, + {file = "aiohttp-3.11.12-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8fd12d0f989c6099e7b0f30dc6e0d1e05499f3337461f0b2b0dadea6c64b89df"}, + {file = "aiohttp-3.11.12-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:76719dd521c20a58a6c256d058547b3a9595d1d885b830013366e27011ffe804"}, + {file = "aiohttp-3.11.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:97fe431f2ed646a3b56142fc81d238abcbaff08548d6912acb0b19a0cadc146b"}, + {file = "aiohttp-3.11.12-cp311-cp311-win32.whl", hash = "sha256:e10c440d142fa8b32cfdb194caf60ceeceb3e49807072e0dc3a8887ea80e8c16"}, + {file = "aiohttp-3.11.12-cp311-cp311-win_amd64.whl", hash = "sha256:246067ba0cf5560cf42e775069c5d80a8989d14a7ded21af529a4e10e3e0f0e6"}, + {file = "aiohttp-3.11.12-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e392804a38353900c3fd8b7cacbea5132888f7129f8e241915e90b85f00e3250"}, + {file = "aiohttp-3.11.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8fa1510b96c08aaad49303ab11f8803787c99222288f310a62f493faf883ede1"}, + {file = "aiohttp-3.11.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dc065a4285307607df3f3686363e7f8bdd0d8ab35f12226362a847731516e42c"}, + {file = "aiohttp-3.11.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddb31f8474695cd61fc9455c644fc1606c164b93bff2490390d90464b4655df"}, + {file = "aiohttp-3.11.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dec0000d2d8621d8015c293e24589d46fa218637d820894cb7356c77eca3259"}, + {file = "aiohttp-3.11.12-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3552fe98e90fdf5918c04769f338a87fa4f00f3b28830ea9b78b1bdc6140e0d"}, + {file = "aiohttp-3.11.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dfe7f984f28a8ae94ff3a7953cd9678550dbd2a1f9bda5dd9c5ae627744c78e"}, + {file = "aiohttp-3.11.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a481a574af914b6e84624412666cbfbe531a05667ca197804ecc19c97b8ab1b0"}, + {file = "aiohttp-3.11.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1987770fb4887560363b0e1a9b75aa303e447433c41284d3af2840a2f226d6e0"}, + {file = "aiohttp-3.11.12-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:a4ac6a0f0f6402854adca4e3259a623f5c82ec3f0c049374133bcb243132baf9"}, + {file = "aiohttp-3.11.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c96a43822f1f9f69cc5c3706af33239489a6294be486a0447fb71380070d4d5f"}, + {file = "aiohttp-3.11.12-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a5e69046f83c0d3cb8f0d5bd9b8838271b1bc898e01562a04398e160953e8eb9"}, + {file = "aiohttp-3.11.12-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:68d54234c8d76d8ef74744f9f9fc6324f1508129e23da8883771cdbb5818cbef"}, + {file = 
"aiohttp-3.11.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c9fd9dcf9c91affe71654ef77426f5cf8489305e1c66ed4816f5a21874b094b9"}, + {file = "aiohttp-3.11.12-cp312-cp312-win32.whl", hash = "sha256:0ed49efcd0dc1611378beadbd97beb5d9ca8fe48579fc04a6ed0844072261b6a"}, + {file = "aiohttp-3.11.12-cp312-cp312-win_amd64.whl", hash = "sha256:54775858c7f2f214476773ce785a19ee81d1294a6bedc5cc17225355aab74802"}, + {file = "aiohttp-3.11.12-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:413ad794dccb19453e2b97c2375f2ca3cdf34dc50d18cc2693bd5aed7d16f4b9"}, + {file = "aiohttp-3.11.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a93d28ed4b4b39e6f46fd240896c29b686b75e39cc6992692e3922ff6982b4c"}, + {file = "aiohttp-3.11.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d589264dbba3b16e8951b6f145d1e6b883094075283dafcab4cdd564a9e353a0"}, + {file = "aiohttp-3.11.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5148ca8955affdfeb864aca158ecae11030e952b25b3ae15d4e2b5ba299bad2"}, + {file = "aiohttp-3.11.12-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:525410e0790aab036492eeea913858989c4cb070ff373ec3bc322d700bdf47c1"}, + {file = "aiohttp-3.11.12-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bd8695be2c80b665ae3f05cb584093a1e59c35ecb7d794d1edd96e8cc9201d7"}, + {file = "aiohttp-3.11.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0203433121484b32646a5f5ea93ae86f3d9559d7243f07e8c0eab5ff8e3f70e"}, + {file = "aiohttp-3.11.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40cd36749a1035c34ba8d8aaf221b91ca3d111532e5ccb5fa8c3703ab1b967ed"}, + {file = "aiohttp-3.11.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a7442662afebbf7b4c6d28cb7aab9e9ce3a5df055fc4116cc7228192ad6cb484"}, + {file = "aiohttp-3.11.12-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:8a2fb742ef378284a50766e985804bd6adb5adb5aa781100b09befdbfa757b65"}, + {file = "aiohttp-3.11.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2cee3b117a8d13ab98b38d5b6bdcd040cfb4181068d05ce0c474ec9db5f3c5bb"}, + {file = "aiohttp-3.11.12-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f6a19bcab7fbd8f8649d6595624856635159a6527861b9cdc3447af288a00c00"}, + {file = "aiohttp-3.11.12-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e4cecdb52aaa9994fbed6b81d4568427b6002f0a91c322697a4bfcc2b2363f5a"}, + {file = "aiohttp-3.11.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:30f546358dfa0953db92ba620101fefc81574f87b2346556b90b5f3ef16e55ce"}, + {file = "aiohttp-3.11.12-cp313-cp313-win32.whl", hash = "sha256:ce1bb21fc7d753b5f8a5d5a4bae99566386b15e716ebdb410154c16c91494d7f"}, + {file = "aiohttp-3.11.12-cp313-cp313-win_amd64.whl", hash = "sha256:f7914ab70d2ee8ab91c13e5402122edbc77821c66d2758abb53aabe87f013287"}, + {file = "aiohttp-3.11.12-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c3623053b85b4296cd3925eeb725e386644fd5bc67250b3bb08b0f144803e7b"}, + {file = "aiohttp-3.11.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:67453e603cea8e85ed566b2700efa1f6916aefbc0c9fcb2e86aaffc08ec38e78"}, + {file = "aiohttp-3.11.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6130459189e61baac5a88c10019b21e1f0c6d00ebc770e9ce269475650ff7f73"}, + {file = "aiohttp-3.11.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9060addfa4ff753b09392efe41e6af06ea5dd257829199747b9f15bfad819460"}, + {file = 
"aiohttp-3.11.12-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34245498eeb9ae54c687a07ad7f160053911b5745e186afe2d0c0f2898a1ab8a"}, + {file = "aiohttp-3.11.12-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dc0fba9a74b471c45ca1a3cb6e6913ebfae416678d90529d188886278e7f3f6"}, + {file = "aiohttp-3.11.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a478aa11b328983c4444dacb947d4513cb371cd323f3845e53caeda6be5589d5"}, + {file = "aiohttp-3.11.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c160a04283c8c6f55b5bf6d4cad59bb9c5b9c9cd08903841b25f1f7109ef1259"}, + {file = "aiohttp-3.11.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:edb69b9589324bdc40961cdf0657815df674f1743a8d5ad9ab56a99e4833cfdd"}, + {file = "aiohttp-3.11.12-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ee84c2a22a809c4f868153b178fe59e71423e1f3d6a8cd416134bb231fbf6d3"}, + {file = "aiohttp-3.11.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bf4480a5438f80e0f1539e15a7eb8b5f97a26fe087e9828e2c0ec2be119a9f72"}, + {file = "aiohttp-3.11.12-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:e6b2732ef3bafc759f653a98881b5b9cdef0716d98f013d376ee8dfd7285abf1"}, + {file = "aiohttp-3.11.12-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f752e80606b132140883bb262a457c475d219d7163d996dc9072434ffb0784c4"}, + {file = "aiohttp-3.11.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ab3247d58b393bda5b1c8f31c9edece7162fc13265334217785518dd770792b8"}, + {file = "aiohttp-3.11.12-cp39-cp39-win32.whl", hash = "sha256:0d5176f310a7fe6f65608213cc74f4228e4f4ce9fd10bcb2bb6da8fc66991462"}, + {file = "aiohttp-3.11.12-cp39-cp39-win_amd64.whl", hash = "sha256:74bd573dde27e58c760d9ca8615c41a57e719bff315c9adb6f2a4281a28e8798"}, + {file = "aiohttp-3.11.12.tar.gz", hash = "sha256:7603ca26d75b1b86160ce1bbe2787a0b706e592af5b2504e12caa88a217767b0"}, ] [package.dependencies] -aiohappyeyeballs = ">=2.5.0" -aiosignal = ">=1.4.0" +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" @@ -173,7 +163,7 @@ propcache = ">=0.2.0" yarl = ">=1.17.0,<2.0" [package.extras] -speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "brotlicffi ; platform_python_implementation != \"CPython\""] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] [[package]] name = "aioitertools" @@ -181,8 +171,6 @@ version = "0.12.0" description = "itertools and builtins for AsyncIO and mixed iterables" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"s3fs\"" files = [ {file = "aioitertools-0.12.0-py3-none-any.whl", hash = "sha256:fc1f5fac3d737354de8831cbba3eb04f79dd649d8f3afb4c5b114925e662a796"}, {file = "aioitertools-0.12.0.tar.gz", hash = "sha256:c2a9055b4fbb7705f561b9d86053e8af5d10cc845d22c32008c43490b2d8dd6b"}, @@ -197,20 +185,17 @@ docs = ["sphinx (==8.0.2)", "sphinx-mdinclude (==0.6.2)"] [[package]] name = "aiosignal" -version = "1.4.0" +version = "1.3.2" description = "aiosignal: a list of registered asynchronous callbacks" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "(extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\") and (extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\" or extra == \"ray\")" files = [ - {file = "aiosignal-1.4.0-py3-none-any.whl", hash = 
"sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"}, - {file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [package.dependencies] frozenlist = ">=1.1.0" -typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""} [[package]] name = "alabaster" @@ -218,7 +203,6 @@ version = "0.7.16" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, @@ -230,7 +214,6 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -242,7 +225,6 @@ version = "4.13.2" description = "ANTLR 4.13.2 runtime for Python 3" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "antlr4_python3_runtime-4.13.2-py3-none-any.whl", hash = "sha256:fe3835eb8d33daece0e799090eda89719dbccee7aa39ef94eed3818cafa5a7e8"}, {file = "antlr4_python3_runtime-4.13.2.tar.gz", hash = "sha256:909b647e1d2fc2b70180ac586df3933e38919c85f98ccc656a96cd3f25ef3916"}, @@ -254,8 +236,6 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "(extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\") and python_version <= \"3.10\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -263,45 +243,42 @@ files = [ [[package]] name = "attrs" -version = "25.3.0" +version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ - {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, - {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, + {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, + {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] -markers = {main = "(extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\") and (extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\" or extra == \"ray\")"} [package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", 
"pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "aws-sam-translator" -version = "1.99.0" +version = "1.94.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = "!=4.0,<=4.0,>=3.8" -groups = ["dev"] files = [ - {file = "aws_sam_translator-1.99.0-py3-none-any.whl", hash = "sha256:b1997e09da876342655eb568e66098280ffd137213009f0136b57f4e7694c98c"}, - {file = "aws_sam_translator-1.99.0.tar.gz", hash = "sha256:be326054a7ee2f535fcd914db85e5d50bdf4054313c14888af69b6de3187cdf8"}, + {file = "aws_sam_translator-1.94.0-py3-none-any.whl", hash = "sha256:100e33eeffcfa81f7c45cadeb0ee29596ce829f6b4d2745140f04fa19a41f539"}, + {file = "aws_sam_translator-1.94.0.tar.gz", hash = "sha256:8ec258d9f7ece72ef91c81f4edb45a2db064c16844b6afac90c575893beaa391"}, ] [package.dependencies] -boto3 = ">=1.34.0,<2.0.0" +boto3 = ">=1.19.5,<2.dev0" jsonschema = ">=3.2,<5" pydantic = ">=1.8,<1.10.15 || >1.10.15,<1.10.17 || >1.10.17,<3" -typing_extensions = ">=4.4" +typing-extensions = ">=4.4" [package.extras] -dev = ["black (==24.3.0)", "boto3 (>=1.34.0,<2.0.0)", "boto3-stubs[appconfig,serverlessrepo] (>=1.34.0,<2.0.0)", "cloudformation-cli (>=0.2.39,<0.3.0)", "coverage (>=5.3,<8)", "dateparser 
(>=1.1,<2.0)", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (>=0.4.5,<0.5.0)", "tenacity (>=9.0,<10.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] +dev = ["black (==24.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (>=0.4.5,<0.5.0)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] [[package]] name = "aws-xray-sdk" @@ -309,7 +286,6 @@ version = "2.14.0" description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "aws_xray_sdk-2.14.0-py2.py3-none-any.whl", hash = "sha256:cfbe6feea3d26613a2a869d14c9246a844285c97087ad8f296f901633554ad94"}, {file = "aws_xray_sdk-2.14.0.tar.gz", hash = "sha256:aab843c331af9ab9ba5cefb3a303832a19db186140894a523edafc024cc0493c"}, @@ -321,15 +297,13 @@ wrapt = "*" [[package]] name = "azure-core" -version = "1.35.0" +version = "1.32.0" description = "Microsoft Azure Core Library for Python" optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"adlfs\"" +python-versions = ">=3.8" files = [ - {file = "azure_core-1.35.0-py3-none-any.whl", hash = "sha256:8db78c72868a58f3de8991eb4d22c4d368fae226dac1002998d6c50437e7dad1"}, - {file = "azure_core-1.35.0.tar.gz", hash = "sha256:c0be528489485e9ede59b6971eb63c1eaacf83ef53001bfe3904e475e972be5c"}, + {file = "azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"}, + {file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"}, ] [package.dependencies] @@ -339,7 +313,6 @@ typing-extensions = ">=4.6.0" [package.extras] aio = ["aiohttp (>=3.0)"] -tracing = ["opentelemetry-api (>=1.26,<2.0)"] [[package]] name = "azure-datalake-store" @@ -347,8 +320,6 @@ version = "0.0.53" description = "Azure Data Lake Store Filesystem Client Library for Python" optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"adlfs\"" files = [ {file = "azure-datalake-store-0.0.53.tar.gz", hash = "sha256:05b6de62ee3f2a0a6e6941e6933b792b800c3e7f6ffce2fc324bc19875757393"}, {file = "azure_datalake_store-0.0.53-py2.py3-none-any.whl", hash = "sha256:a30c902a6e360aa47d7f69f086b426729784e71c536f330b691647a51dc42b2b"}, @@ -361,15 +332,13 @@ requests = ">=2.20.0" [[package]] name = "azure-identity" -version = "1.23.0" +version = "1.20.0" description = "Microsoft Azure Identity Library for Python" optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"adlfs\"" +python-versions = ">=3.8" files = [ - {file = "azure_identity-1.23.0-py3-none-any.whl", hash = "sha256:dbbeb64b8e5eaa81c44c565f264b519ff2de7ff0e02271c49f3cb492762a50b0"}, - {file = "azure_identity-1.23.0.tar.gz", hash = 
"sha256:d9cdcad39adb49d4bb2953a217f62aec1f65bbb3c63c9076da2be2a47e53dde4"}, + {file = "azure_identity-1.20.0-py3-none-any.whl", hash = "sha256:5f23fc4889a66330e840bd78830287e14f3761820fe3c5f77ac875edcb9ec998"}, + {file = "azure_identity-1.20.0.tar.gz", hash = "sha256:40597210d56c83e15031b0fe2ea3b26420189e1e7f3e20bdbb292315da1ba014"}, ] [package.dependencies] @@ -381,15 +350,13 @@ typing-extensions = ">=4.0.0" [[package]] name = "azure-storage-blob" -version = "12.25.1" +version = "12.24.1" description = "Microsoft Azure Blob Storage Client Library for Python" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"adlfs\"" files = [ - {file = "azure_storage_blob-12.25.1-py3-none-any.whl", hash = "sha256:1f337aab12e918ec3f1b638baada97550673911c4ceed892acc8e4e891b74167"}, - {file = "azure_storage_blob-12.25.1.tar.gz", hash = "sha256:4f294ddc9bc47909ac66b8934bd26b50d2000278b10ad82cc109764fdc6e0e3b"}, + {file = "azure_storage_blob-12.24.1-py3-none-any.whl", hash = "sha256:77fb823fdbac7f3c11f7d86a5892e2f85e161e8440a7489babe2195bf248f09e"}, + {file = "azure_storage_blob-12.24.1.tar.gz", hash = "sha256:052b2a1ea41725ba12e2f4f17be85a54df1129e13ea0321f5a2fcc851cbf47d4"}, ] [package.dependencies] @@ -407,14 +374,13 @@ version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" -groups = ["dev", "docs"] files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] name = "backports-tarfile" @@ -422,8 +388,6 @@ version = "1.2.0" description = "Backport of CPython tarfile module" optional = false python-versions = ">=3.8" -groups = ["dev"] -markers = "python_version < \"3.12\"" files = [ {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"}, {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"}, @@ -433,127 +397,46 @@ files = [ docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] -[[package]] -name = "backrefs" -version = "5.9" -description = "A wrapper around re and regex that adds additional back references." 
-optional = false -python-versions = ">=3.9" -groups = ["docs"] -files = [ - {file = "backrefs-5.9-py310-none-any.whl", hash = "sha256:db8e8ba0e9de81fcd635f440deab5ae5f2591b54ac1ebe0550a2ca063488cd9f"}, - {file = "backrefs-5.9-py311-none-any.whl", hash = "sha256:6907635edebbe9b2dc3de3a2befff44d74f30a4562adbb8b36f21252ea19c5cf"}, - {file = "backrefs-5.9-py312-none-any.whl", hash = "sha256:7fdf9771f63e6028d7fee7e0c497c81abda597ea45d6b8f89e8ad76994f5befa"}, - {file = "backrefs-5.9-py313-none-any.whl", hash = "sha256:cc37b19fa219e93ff825ed1fed8879e47b4d89aa7a1884860e2db64ccd7c676b"}, - {file = "backrefs-5.9-py314-none-any.whl", hash = "sha256:df5e169836cc8acb5e440ebae9aad4bf9d15e226d3bad049cf3f6a5c20cc8dc9"}, - {file = "backrefs-5.9-py39-none-any.whl", hash = "sha256:f48ee18f6252b8f5777a22a00a09a85de0ca931658f1dd96d4406a34f3748c60"}, - {file = "backrefs-5.9.tar.gz", hash = "sha256:808548cb708d66b82ee231f962cb36faaf4f2baab032f2fbb783e9c2fdddaa59"}, -] - -[package.extras] -extras = ["regex"] - [[package]] name = "blinker" version = "1.9.0" description = "Fast, simple object-to-object and broadcast signaling" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, ] -[[package]] -name = "bodo" -version = "2025.8.1" -description = "High-Performance Python Compute Engine for Data and AI" -optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"bodo\"" -files = [ - {file = "bodo-2025.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:fecf784bc216c7d691a73be6e0f3607d3d577ab7b9fd7bafbe3d330e12a99baf"}, - {file = "bodo-2025.8.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:a0bdfff8f2586aa8c7292f734478a4deadc813a30e4a28338f3fec9e550b6204"}, - {file = "bodo-2025.8.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd0f052355146d13db48102a3120b3d8c6a9234af01c1f5386148e7f3b5860c5"}, - {file = "bodo-2025.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:5ae3f1f71ba306f8c6b32b14467e40710cdaaf21e549c08d8a7f187d3e41c3d6"}, - {file = "bodo-2025.8.1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:f69bcfbbb53a2adb35b6a20a8f64e872ca46a37e6131c7770c7c7dbbc2d3a95c"}, - {file = "bodo-2025.8.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:34e05ec454bd90c51da4eb4134f52556afc695b45f94c069118ea5d49bd0e589"}, - {file = "bodo-2025.8.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7cf9eb866ff71f18451840ba85adc1954a937fcac8b1aa202510058b90d04ef6"}, - {file = "bodo-2025.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:67a2135c57e5ef8ba465f5236eecaf39b71687f99d54ef5f1a582a0fc7a65cbf"}, - {file = "bodo-2025.8.1-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:0f9c9fcd66f020639d21dd258d5fd9d1d106e7291d037c3cb00e83a829ca2748"}, - {file = "bodo-2025.8.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:19ec80985c6d05dcad4a5920c40da25af24d1bf5e206d2c6306f95beacc259da"}, - {file = "bodo-2025.8.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0dca0c2a983f5e53085b722e20b28a39b67a1e417589a6182a396e5799bde643"}, - {file = "bodo-2025.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:97b3c8aab165918779f04a70aeebdd2b389032b230c0982ff1b8b1cc234628d5"}, - {file = "bodo-2025.8.1-cp313-cp313-macosx_11_0_x86_64.whl", hash = 
"sha256:75a3e5b41543d399e84e380701346df23212409910d3da9dc09eff03790e74e9"}, - {file = "bodo-2025.8.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:837dabf34814707f1225d3f879c7e7c0f3c6113e890b57161f934b9ee24a7183"}, - {file = "bodo-2025.8.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee7f7a7937b91ce8bfa7e8442981e2e7367a0cdb1d364c2ba6cb5ed9bd811d40"}, - {file = "bodo-2025.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:3767a06b91f9105c03cf9b797b2a032013da02679ff3a0f029e7903973646c16"}, - {file = "bodo-2025.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:644ef16d4bdf7b00ccacbe0f9605f0e3b5411d07e6079d332d34cbed3c037646"}, - {file = "bodo-2025.8.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4125d5b9048bcd41444795820f974e6d8b44b4e6be3f923e7ef380af32360e"}, - {file = "bodo-2025.8.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1b8df8f4599501348c8f739a274ef005ca150856b7cf078c17f04d8edd89d32d"}, - {file = "bodo-2025.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:447e04d079e7f513f9866152f8357899e0e9abf023d075b0091f4437bb825551"}, -] - -[package.dependencies] -cloudpickle = ">=3.0" -fsspec = ">=2021.09" -impi-rt = {version = "*", markers = "sys_platform == \"win32\""} -numba = ">=0.60" -numpy = ">=1.24" -pandas = ">=2.2" -psutil = "*" -pyarrow = ">=19.0,<19.1" -requests = "*" - -[package.extras] -adlfs = ["adlfs (>=2022.1.0)"] -hdf5 = ["h5py"] -huggingface-hub = ["huggingface_hub"] -iceberg = ["pyiceberg[glue] (>=0.9)"] -mysql = ["PyMySQL", "sqlalchemy"] -openai = ["openai"] -oracle = ["cx-Oracle", "libaio", "sqlalchemy"] -plot = ["matplotlib"] -postgres = ["psycopg2", "sqlalchemy"] -s3fs = ["s3fs (>=2022.1.0)"] -sklearn = ["scikit-learn"] -snowflake = ["snowflake-connector-python"] - [[package]] name = "boto3" -version = "1.39.11" +version = "1.36.3" description = "The AWS SDK for Python" optional = false -python-versions = ">=3.9" -groups = ["main", "dev"] +python-versions = ">=3.8" files = [ - {file = "boto3-1.39.11-py3-none-any.whl", hash = "sha256:af8f1dad35eceff7658fab43b39b0f55892b6e3dd12308733521cc24dd2c9a02"}, - {file = "boto3-1.39.11.tar.gz", hash = "sha256:3027edf20642fe1d5f9dc50a420d0fe2733073ed6a9f0f047b60fe08c3682132"}, + {file = "boto3-1.36.3-py3-none-any.whl", hash = "sha256:f9843a5d06f501d66ada06f5a5417f671823af2cf319e36ceefa1bafaaaaa953"}, + {file = "boto3-1.36.3.tar.gz", hash = "sha256:53a5307f6a3526ee2f8590e3c45efa504a3ea4532c1bfe4926c0c19bf188d141"}, ] -markers = {main = "extra == \"dynamodb\" or extra == \"glue\" or extra == \"rest-sigv4\""} [package.dependencies] -botocore = ">=1.39.11,<1.40.0" +botocore = ">=1.36.3,<1.37.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.13.0,<0.14.0" +s3transfer = ">=0.11.0,<0.12.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.39.11" +version = "1.36.3" description = "Low-level, data-driven core of boto 3." 
optional = false -python-versions = ">=3.9" -groups = ["main", "dev"] +python-versions = ">=3.8" files = [ - {file = "botocore-1.39.11-py3-none-any.whl", hash = "sha256:1545352931a8a186f3e977b1e1a4542d7d434796e274c3c62efd0210b5ea76dc"}, - {file = "botocore-1.39.11.tar.gz", hash = "sha256:953b12909d6799350e346ab038e55b6efe622c616f80aef74d7a6683ffdd972c"}, + {file = "botocore-1.36.3-py3-none-any.whl", hash = "sha256:536ab828e6f90dbb000e3702ac45fd76642113ae2db1b7b1373ad24104e89255"}, + {file = "botocore-1.36.3.tar.gz", hash = "sha256:775b835e979da5c96548ed1a0b798101a145aec3cd46541d62e27dda5a94d7f8"}, ] -markers = {main = "extra == \"dynamodb\" or extra == \"glue\" or extra == \"rest-sigv4\" or extra == \"s3fs\""} [package.dependencies] jmespath = ">=0.7.1,<2.0.0" @@ -564,7 +447,7 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.23.8)"] +crt = ["awscrt (==0.23.4)"] [[package]] name = "build" @@ -572,7 +455,6 @@ version = "1.2.2.post1" description = "A simple, correct Python build frontend" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, @@ -588,33 +470,31 @@ virtualenv = {version = ">=20.0.35", optional = true, markers = "extra == \"virt [package.extras] docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] -test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0) ; python_version < \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.11\"", "setuptools (>=67.8.0) ; python_version >= \"3.12\"", "wheel (>=0.36.0)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] uv = ["uv (>=0.1.18)"] virtualenv = ["virtualenv (>=20.0.35)"] [[package]] name = "cachetools" -version = "5.5.2" +version = "5.5.1" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ - {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, - {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, + {file = "cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"}, + {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"}, ] [[package]] name = "certifi" -version = "2025.7.14" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false -python-versions = ">=3.7" -groups = ["main", "dev", "docs"] +python-versions = ">=3.6" files = [ - {file = "certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2"}, - {file = "certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] @@ -623,7 +503,6 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -693,7 +572,6 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] -markers = {main = "(extra == \"zstandard\" or extra == \"adlfs\") and platform_python_implementation == \"PyPy\" or extra == \"adlfs\"", dev = "platform_python_implementation != \"PyPy\""} [package.dependencies] pycparser = "*" @@ -704,7 +582,6 @@ version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -712,18 +589,17 @@ files = [ [[package]] name = "cfn-lint" -version = "1.38.0" +version = "1.24.0" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" optional = false -python-versions = ">=3.9" -groups = ["dev"] +python-versions = ">=3.8" files = [ - {file = "cfn_lint-1.38.0-py3-none-any.whl", hash = "sha256:336753eb5259022f6581e26cece84ef729ef3d06ca1445d02ade9a966474d915"}, - {file = "cfn_lint-1.38.0.tar.gz", hash = "sha256:356275ec13a1f9cd20f87ef4ff7396a34aefad633f4783126d8f5507400b925d"}, + {file = "cfn_lint-1.24.0-py3-none-any.whl", hash = "sha256:b8ca01ba384587aa7c0e09bf71cb4fa6993bd6da56498139f316e238f47d4eb8"}, + {file = "cfn_lint-1.24.0.tar.gz", hash = "sha256:8b081808aa3a2f60abf9ef4ce05b1f270adab5b81124376f2f8e95c2fd767fb6"}, ] [package.dependencies] -aws-sam-translator = ">=1.97.0" +aws-sam-translator = ">=1.94.0" jsonpatch = "*" networkx = ">=2.4,<4" pyyaml = ">5.4" @@ -739,104 +615,103 @@ sarif = ["jschema_to_python (>=1.2.3,<1.3.0)", "sarif-om (>=1.0.4,<1.1.0)"] [[package]] name = "charset-normalizer" -version = "3.4.2" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" -groups = ["main", "dev", "docs"] -files = [ - {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, - {file = 
"charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = 
"sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, - {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, - {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = 
"charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] @@ -845,7 +720,6 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["main", "dev", "docs"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -854,438 +728,336 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "cloudpickle" -version = "3.1.1" -description = "Pickler class to extend the standard pickle.Pickler functionality" -optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"bodo\"" -files = [ - {file = "cloudpickle-3.1.1-py3-none-any.whl", hash = "sha256:c8c5a44295039331ee9dad40ba100a9c7297b6f988e50e87ccdf3765a668350e"}, - {file = "cloudpickle-3.1.1.tar.gz", hash = "sha256:b216fa8ae4019d5482a8ac3c95d8f6346115d8835911fd4aefd1a445e4242c64"}, -] - 
[[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "dev", "docs"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\" or os_name == \"nt\""} [[package]] name = "coverage" -version = "7.10.3" +version = "7.6.12" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "coverage-7.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53808194afdf948c462215e9403cca27a81cf150d2f9b386aee4dab614ae2ffe"}, - {file = "coverage-7.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f4d1b837d1abf72187a61645dbf799e0d7705aa9232924946e1f57eb09a3bf00"}, - {file = "coverage-7.10.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2a90dd4505d3cc68b847ab10c5ee81822a968b5191664e8a0801778fa60459fa"}, - {file = "coverage-7.10.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d52989685ff5bf909c430e6d7f6550937bc6d6f3e6ecb303c97a86100efd4596"}, - {file = "coverage-7.10.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdb558a1d97345bde3a9f4d3e8d11c9e5611f748646e9bb61d7d612a796671b5"}, - {file = "coverage-7.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c9e6331a8f09cb1fc8bda032752af03c366870b48cce908875ba2620d20d0ad4"}, - {file = "coverage-7.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:992f48bf35b720e174e7fae916d943599f1a66501a2710d06c5f8104e0756ee1"}, - {file = "coverage-7.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c5595fc4ad6a39312c786ec3326d7322d0cf10e3ac6a6df70809910026d67cfb"}, - {file = "coverage-7.10.3-cp310-cp310-win32.whl", hash = "sha256:9e92fa1f2bd5a57df9d00cf9ce1eb4ef6fccca4ceabec1c984837de55329db34"}, - {file = "coverage-7.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:b96524d6e4a3ce6a75c56bb15dbd08023b0ae2289c254e15b9fbdddf0c577416"}, - {file = "coverage-7.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2ff2e2afdf0d51b9b8301e542d9c21a8d084fd23d4c8ea2b3a1b3c96f5f7397"}, - {file = "coverage-7.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:18ecc5d1b9a8c570f6c9b808fa9a2b16836b3dd5414a6d467ae942208b095f85"}, - {file = "coverage-7.10.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1af4461b25fe92889590d438905e1fc79a95680ec2a1ff69a591bb3fdb6c7157"}, - {file = "coverage-7.10.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3966bc9a76b09a40dc6063c8b10375e827ea5dfcaffae402dd65953bef4cba54"}, - {file = "coverage-7.10.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:205a95b87ef4eb303b7bc5118b47b6b6604a644bcbdb33c336a41cfc0a08c06a"}, - {file = "coverage-7.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b3801b79fb2ad61e3c7e2554bab754fc5f105626056980a2b9cf3aef4f13f84"}, - {file = "coverage-7.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:b0dc69c60224cda33d384572da945759756e3f06b9cdac27f302f53961e63160"}, - {file = "coverage-7.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a83d4f134bab2c7ff758e6bb1541dd72b54ba295ced6a63d93efc2e20cb9b124"}, - {file = "coverage-7.10.3-cp311-cp311-win32.whl", hash = "sha256:54e409dd64e5302b2a8fdf44ec1c26f47abd1f45a2dcf67bd161873ee05a59b8"}, - {file = "coverage-7.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:30c601610a9b23807c5e9e2e442054b795953ab85d525c3de1b1b27cebeb2117"}, - {file = "coverage-7.10.3-cp311-cp311-win_arm64.whl", hash = "sha256:dabe662312a97958e932dee056f2659051d822552c0b866823e8ba1c2fe64770"}, - {file = "coverage-7.10.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:449c1e2d3a84d18bd204258a897a87bc57380072eb2aded6a5b5226046207b42"}, - {file = "coverage-7.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d4f9ce50b9261ad196dc2b2e9f1fbbee21651b54c3097a25ad783679fd18294"}, - {file = "coverage-7.10.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4dd4564207b160d0d45c36a10bc0a3d12563028e8b48cd6459ea322302a156d7"}, - {file = "coverage-7.10.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5ca3c9530ee072b7cb6a6ea7b640bcdff0ad3b334ae9687e521e59f79b1d0437"}, - {file = "coverage-7.10.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b6df359e59fa243c9925ae6507e27f29c46698359f45e568fd51b9315dbbe587"}, - {file = "coverage-7.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a181e4c2c896c2ff64c6312db3bda38e9ade2e1aa67f86a5628ae85873786cea"}, - {file = "coverage-7.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a374d4e923814e8b72b205ef6b3d3a647bb50e66f3558582eda074c976923613"}, - {file = "coverage-7.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:daeefff05993e5e8c6e7499a8508e7bd94502b6b9a9159c84fd1fe6bce3151cb"}, - {file = "coverage-7.10.3-cp312-cp312-win32.whl", hash = "sha256:187ecdcac21f9636d570e419773df7bd2fda2e7fa040f812e7f95d0bddf5f79a"}, - {file = "coverage-7.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:4a50ad2524ee7e4c2a95e60d2b0b83283bdfc745fe82359d567e4f15d3823eb5"}, - {file = "coverage-7.10.3-cp312-cp312-win_arm64.whl", hash = "sha256:c112f04e075d3495fa3ed2200f71317da99608cbb2e9345bdb6de8819fc30571"}, - {file = "coverage-7.10.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b99e87304ffe0eb97c5308447328a584258951853807afdc58b16143a530518a"}, - {file = "coverage-7.10.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4af09c7574d09afbc1ea7da9dcea23665c01f3bc1b1feb061dac135f98ffc53a"}, - {file = "coverage-7.10.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:488e9b50dc5d2aa9521053cfa706209e5acf5289e81edc28291a24f4e4488f46"}, - {file = "coverage-7.10.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:913ceddb4289cbba3a310704a424e3fb7aac2bc0c3a23ea473193cb290cf17d4"}, - {file = "coverage-7.10.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b1f91cbc78c7112ab84ed2a8defbccd90f888fcae40a97ddd6466b0bec6ae8a"}, - {file = "coverage-7.10.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0bac054d45af7cd938834b43a9878b36ea92781bcb009eab040a5b09e9927e3"}, - {file = "coverage-7.10.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fe72cbdd12d9e0f4aca873fa6d755e103888a7f9085e4a62d282d9d5b9f7928c"}, - {file = 
"coverage-7.10.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c1e2e927ab3eadd7c244023927d646e4c15c65bb2ac7ae3c3e9537c013700d21"}, - {file = "coverage-7.10.3-cp313-cp313-win32.whl", hash = "sha256:24d0c13de473b04920ddd6e5da3c08831b1170b8f3b17461d7429b61cad59ae0"}, - {file = "coverage-7.10.3-cp313-cp313-win_amd64.whl", hash = "sha256:3564aae76bce4b96e2345cf53b4c87e938c4985424a9be6a66ee902626edec4c"}, - {file = "coverage-7.10.3-cp313-cp313-win_arm64.whl", hash = "sha256:f35580f19f297455f44afcd773c9c7a058e52eb6eb170aa31222e635f2e38b87"}, - {file = "coverage-7.10.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07009152f497a0464ffdf2634586787aea0e69ddd023eafb23fc38267db94b84"}, - {file = "coverage-7.10.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd2ba5f0c7e7e8cc418be2f0c14c4d9e3f08b8fb8e4c0f83c2fe87d03eb655e"}, - {file = "coverage-7.10.3-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1ae22b97003c74186e034a93e4f946c75fad8c0ce8d92fbbc168b5e15ee2841f"}, - {file = "coverage-7.10.3-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:eb329f1046888a36b1dc35504d3029e1dd5afe2196d94315d18c45ee380f67d5"}, - {file = "coverage-7.10.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce01048199a91f07f96ca3074b0c14021f4fe7ffd29a3e6a188ac60a5c3a4af8"}, - {file = "coverage-7.10.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:08b989a06eb9dfacf96d42b7fb4c9a22bafa370d245dc22fa839f2168c6f9fa1"}, - {file = "coverage-7.10.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:669fe0d4e69c575c52148511029b722ba8d26e8a3129840c2ce0522e1452b256"}, - {file = "coverage-7.10.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3262d19092771c83f3413831d9904b1ccc5f98da5de4ffa4ad67f5b20c7aaf7b"}, - {file = "coverage-7.10.3-cp313-cp313t-win32.whl", hash = "sha256:cc0ee4b2ccd42cab7ee6be46d8a67d230cb33a0a7cd47a58b587a7063b6c6b0e"}, - {file = "coverage-7.10.3-cp313-cp313t-win_amd64.whl", hash = "sha256:03db599f213341e2960430984e04cf35fb179724e052a3ee627a068653cf4a7c"}, - {file = "coverage-7.10.3-cp313-cp313t-win_arm64.whl", hash = "sha256:46eae7893ba65f53c71284585a262f083ef71594f05ec5c85baf79c402369098"}, - {file = "coverage-7.10.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:bce8b8180912914032785850d8f3aacb25ec1810f5f54afc4a8b114e7a9b55de"}, - {file = "coverage-7.10.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:07790b4b37d56608536f7c1079bd1aa511567ac2966d33d5cec9cf520c50a7c8"}, - {file = "coverage-7.10.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e79367ef2cd9166acedcbf136a458dfe9a4a2dd4d1ee95738fb2ee581c56f667"}, - {file = "coverage-7.10.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:419d2a0f769f26cb1d05e9ccbc5eab4cb5d70231604d47150867c07822acbdf4"}, - {file = "coverage-7.10.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee221cf244757cdc2ac882e3062ab414b8464ad9c884c21e878517ea64b3fa26"}, - {file = "coverage-7.10.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c2079d8cdd6f7373d628e14b3357f24d1db02c9dc22e6a007418ca7a2be0435a"}, - {file = "coverage-7.10.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:bd8df1f83c0703fa3ca781b02d36f9ec67ad9cb725b18d486405924f5e4270bd"}, - {file = "coverage-7.10.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:6b4e25e0fa335c8aa26e42a52053f3786a61cc7622b4d54ae2dad994aa754fec"}, - {file = "coverage-7.10.3-cp314-cp314-win32.whl", hash = "sha256:d7c3d02c2866deb217dce664c71787f4b25420ea3eaf87056f44fb364a3528f5"}, - {file = "coverage-7.10.3-cp314-cp314-win_amd64.whl", hash = "sha256:9c8916d44d9e0fe6cdb2227dc6b0edd8bc6c8ef13438bbbf69af7482d9bb9833"}, - {file = "coverage-7.10.3-cp314-cp314-win_arm64.whl", hash = "sha256:1007d6a2b3cf197c57105cc1ba390d9ff7f0bee215ced4dea530181e49c65ab4"}, - {file = "coverage-7.10.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ebc8791d346410d096818788877d675ca55c91db87d60e8f477bd41c6970ffc6"}, - {file = "coverage-7.10.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f4e4d8e75f6fd3c6940ebeed29e3d9d632e1f18f6fb65d33086d99d4d073241"}, - {file = "coverage-7.10.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:24581ed69f132b6225a31b0228ae4885731cddc966f8a33fe5987288bdbbbd5e"}, - {file = "coverage-7.10.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec151569ddfccbf71bac8c422dce15e176167385a00cd86e887f9a80035ce8a5"}, - {file = "coverage-7.10.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2ae8e7c56290b908ee817200c0b65929b8050bc28530b131fe7c6dfee3e7d86b"}, - {file = "coverage-7.10.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fb742309766d7e48e9eb4dc34bc95a424707bc6140c0e7d9726e794f11b92a0"}, - {file = "coverage-7.10.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:c65e2a5b32fbe1e499f1036efa6eb9cb4ea2bf6f7168d0e7a5852f3024f471b1"}, - {file = "coverage-7.10.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d48d2cb07d50f12f4f18d2bb75d9d19e3506c26d96fffabf56d22936e5ed8f7c"}, - {file = "coverage-7.10.3-cp314-cp314t-win32.whl", hash = "sha256:dec0d9bc15ee305e09fe2cd1911d3f0371262d3cfdae05d79515d8cb712b4869"}, - {file = "coverage-7.10.3-cp314-cp314t-win_amd64.whl", hash = "sha256:424ea93a323aa0f7f01174308ea78bde885c3089ec1bef7143a6d93c3e24ef64"}, - {file = "coverage-7.10.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f5983c132a62d93d71c9ef896a0b9bf6e6828d8d2ea32611f58684fba60bba35"}, - {file = "coverage-7.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:da749daa7e141985487e1ff90a68315b0845930ed53dc397f4ae8f8bab25b551"}, - {file = "coverage-7.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3126fb6a47d287f461d9b1aa5d1a8c97034d1dffb4f452f2cf211289dae74ef"}, - {file = "coverage-7.10.3-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3da794db13cc27ca40e1ec8127945b97fab78ba548040047d54e7bfa6d442dca"}, - {file = "coverage-7.10.3-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4e27bebbd184ef8d1c1e092b74a2b7109dcbe2618dce6e96b1776d53b14b3fe8"}, - {file = "coverage-7.10.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8fd4ee2580b9fefbd301b4f8f85b62ac90d1e848bea54f89a5748cf132782118"}, - {file = "coverage-7.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6999920bdd73259ce11cabfc1307484f071ecc6abdb2ca58d98facbcefc70f16"}, - {file = "coverage-7.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3623f929db885fab100cb88220a5b193321ed37e03af719efdbaf5d10b6e227"}, - {file = "coverage-7.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:25b902c5e15dea056485d782e420bb84621cc08ee75d5131ecb3dbef8bd1365f"}, - {file = "coverage-7.10.3-cp39-cp39-win32.whl", 
hash = "sha256:f930a4d92b004b643183451fe9c8fe398ccf866ed37d172ebaccfd443a097f61"}, - {file = "coverage-7.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:08e638a93c8acba13c7842953f92a33d52d73e410329acd472280d2a21a6c0e1"}, - {file = "coverage-7.10.3-py3-none-any.whl", hash = "sha256:416a8d74dc0adfd33944ba2f405897bab87b7e9e84a391e09d241956bd953ce1"}, - {file = "coverage-7.10.3.tar.gz", hash = "sha256:812ba9250532e4a823b070b0420a36499859542335af3dca8f47fc6aa1a05619"}, +files = [ + {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, + {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, + {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, + {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, + {file = 
"coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, + {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, + {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, + {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, + {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, + {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, + {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, + {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, + {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, + {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, + {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, + {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli ; python_full_version <= 
\"3.11.0a6\""] +toml = ["tomli"] [[package]] name = "cramjam" -version = "2.10.0" +version = "2.9.1" description = "Thin Python bindings to de/compression algorithms in Rust" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"snappy\"" -files = [ - {file = "cramjam-2.10.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:26c44f17938cf00a339899ce6ea7ba12af7b1210d707a80a7f14724fba39869b"}, - {file = "cramjam-2.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ce208a3e4043b8ce89e5d90047da16882456ea395577b1ee07e8215dce7d7c91"}, - {file = "cramjam-2.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2c24907c972aca7b56c8326307e15d78f56199852dda1e67e4e54c2672afede4"}, - {file = "cramjam-2.10.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f25db473667774725e4f34e738d644ffb205bf0bdc0e8146870a1104c5f42e4a"}, - {file = "cramjam-2.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51eb00c72d4a93e4a2ddcc751ba2a7a1318026247e80742866912ec82b39e5ce"}, - {file = "cramjam-2.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:def47645b1b970fd97f063da852b0ddc4f5bdee9af8d5b718d9682c7b828d89d"}, - {file = "cramjam-2.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42dcd7c83104edae70004a8dc494e4e57de4940e3019e5d2cbec2830d5908a85"}, - {file = "cramjam-2.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0744e391ea8baf0ddea5a180b0aa71a6a302490c14d7a37add730bf0172c7c6"}, - {file = "cramjam-2.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5018c7414047f640b126df02e9286a8da7cc620798cea2b39bac79731c2ee336"}, - {file = "cramjam-2.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4b201aacc7a06079b063cfbcf5efe78b1e65c7279b2828d06ffaa90a8316579d"}, - {file = "cramjam-2.10.0-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5264ac242697fbb1cfffa79d0153cbc4c088538bd99d60cfa374e8a8b83e2bb5"}, - {file = "cramjam-2.10.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e193918c81139361f3f45db19696d31847601f2c0e79a38618f34d7bff6ee704"}, - {file = "cramjam-2.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22a7ab05c62b0a71fcd6db4274af1508c5ea039a43fb143ac50a62f86e6f32f7"}, - {file = "cramjam-2.10.0-cp310-cp310-win32.whl", hash = "sha256:2464bdf0e2432e0f07a834f48c16022cd7f4648ed18badf52c32c13d6722518c"}, - {file = "cramjam-2.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:73b6ffc8ffe6546462ccc7e34ca3acd9eb3984e1232645f498544a7eab6b8aca"}, - {file = "cramjam-2.10.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:fb73ee9616e3efd2cf3857b019c66f9bf287bb47139ea48425850da2ae508670"}, - {file = "cramjam-2.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:acef0e2c4d9f38428721a0ec878dee3fb73a35e640593d99c9803457dbb65214"}, - {file = "cramjam-2.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b21b1672814ecce88f1da76635f0483d2d877d4cb8998db3692792f46279bf1"}, - {file = "cramjam-2.10.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7699d61c712bc77907c48fe63a21fffa03c4dd70401e1d14e368af031fde7c21"}, - {file = "cramjam-2.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3484f1595eef64cefed05804d7ec8a88695f89086c49b086634e44c16f3d4769"}, - {file = "cramjam-2.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:38fba4594dd0e2b7423ef403039e63774086ebb0696d9060db20093f18a2f43e"}, - {file = "cramjam-2.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07fe3e48c881a75a11f722e1d5b052173b5e7c78b22518f659b8c9b4ac4c937"}, - {file = "cramjam-2.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3596b6ceaf85f872c1e56295c6ec80bb15fdd71e7ed9e0e5c3e654563dcc40a2"}, - {file = "cramjam-2.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c03360c1760f8608dc5ce1ddd7e5491180765360cae8104b428d5f86fbe1b9"}, - {file = "cramjam-2.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3e0b70fe7796b63b87cb7ebfaad0ebaca7574fdf177311952f74b8bda6522fb8"}, - {file = "cramjam-2.10.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:d61a21e4153589bd53ffe71b553f93f2afbc8fb7baf63c91a83c933347473083"}, - {file = "cramjam-2.10.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:91ab85752a08dc875a05742cfda0234d7a70fadda07dd0b0582cfe991911f332"}, - {file = "cramjam-2.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c6afff7e9da53afb8d11eae27a20ee5709e2943b39af6c949b38424d0f271569"}, - {file = "cramjam-2.10.0-cp311-cp311-win32.whl", hash = "sha256:adf484b06063134ae604d4fc826d942af7e751c9d0b2fcab5bf1058a8ebe242b"}, - {file = "cramjam-2.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9e20ebea6ec77232cd12e4084c8be6d03534dc5f3d027d365b32766beafce6c3"}, - {file = "cramjam-2.10.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0acb17e3681138b48300b27d3409742c81d5734ec39c650a60a764c135197840"}, - {file = "cramjam-2.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:647553c44cf6b5ce2d9b56e743cc1eab886940d776b36438183e807bb5a7a42b"}, - {file = "cramjam-2.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c52805c7ccb533fe42d3d36c91d237c97c3b6551cd6b32f98b79eeb30d0f139"}, - {file = "cramjam-2.10.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:337ceb50bde7708b2a4068f3000625c23ceb1b2497edce2e21fd08ef58549170"}, - {file = "cramjam-2.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c071765bdd5eefa3b2157a61e84d72e161b63f95eb702a0133fee293800a619"}, - {file = "cramjam-2.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b40d46d2aa566f8e3def953279cce0191e47364b453cda492db12a84dd97f78"}, - {file = "cramjam-2.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c7bab3703babb93c9dd4444ac9797d01ec46cf521e247d3319bfb292414d053"}, - {file = "cramjam-2.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba19308b8e19cdaadfbf47142f52b705d2cbfb8edd84a8271573e50fa7fa022d"}, - {file = "cramjam-2.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3e4be5aa71b73c2640c9b86e435ec033592f7f79787937f8342259106a63ae"}, - {file = "cramjam-2.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:11c5ef0c70d6bdd8e1d8afed8b0430709b22decc3865eb6c0656aa00117a7b3d"}, - {file = "cramjam-2.10.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:86b29e349064821ceeb14d60d01a11a0788f94e73ed4b3a5c3f9fac7aa4e2cd7"}, - {file = "cramjam-2.10.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2c7008bb54bdc5d130c0e8581925dfcbdc6f0a4d2051de7a153bfced9a31910f"}, - {file = "cramjam-2.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a94fe7024137ed8bf200308000d106874afe52ff203f852f43b3547eddfa10e"}, - {file = 
"cramjam-2.10.0-cp312-cp312-win32.whl", hash = "sha256:ce11be5722c9d433c5e1eb3980f16eb7d80828b9614f089e28f4f1724fc8973f"}, - {file = "cramjam-2.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a01e89e99ba066dfa2df40fe99a2371565f4a3adc6811a73c8019d9929a312e8"}, - {file = "cramjam-2.10.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8bb0b6aaaa5f37091e05d756a3337faf0ddcffe8a68dbe8a710731b0d555ec8f"}, - {file = "cramjam-2.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:27b2625c0840b9a5522eba30b165940084391762492e03b9d640fca5074016ae"}, - {file = "cramjam-2.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4ba90f7b8f986934f33aad8cc029cf7c74842d3ecd5eda71f7531330d38a8dc4"}, - {file = "cramjam-2.10.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6655d04942f7c02087a6bba4bdc8d88961aa8ddf3fb9a05b3bad06d2d1ca321b"}, - {file = "cramjam-2.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7dda9be2caf067ac21c4aa63497833e0984908b66849c07aaa42b1cfa93f5e1c"}, - {file = "cramjam-2.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:afa36aa006d7692718fce427ecb276211918447f806f80c19096a627f5122e3d"}, - {file = "cramjam-2.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d46fd5a9e8eb5d56eccc6191a55e3e1e2b3ab24b19ab87563a2299a39c855fd7"}, - {file = "cramjam-2.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3012564760394dff89e7a10c5a244f8885cd155aec07bdbe2d6dc46be398614"}, - {file = "cramjam-2.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2d216ed4aca2090eabdd354204ae55ed3e13333d1a5b271981543696e634672"}, - {file = "cramjam-2.10.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:44c2660ee7c4c269646955e4e40c2693f803fbad12398bb31b2ad00cfc6027b8"}, - {file = "cramjam-2.10.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:636a48e2d01fe8d7955e9523efd2f8efce55a0221f3b5d5b4bdf37c7ff056bf1"}, - {file = "cramjam-2.10.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:44c15f6117031a84497433b5f55d30ee72d438fdcba9778fec0c5ca5d416aa96"}, - {file = "cramjam-2.10.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:76e4e42f2ecf1aca0a710adaa23000a192efb81a2aee3bcc16761f1777f08a74"}, - {file = "cramjam-2.10.0-cp313-cp313-win32.whl", hash = "sha256:5b34f4678d386c64d3be402fdf67f75e8f1869627ea2ec4decd43e828d3b6fba"}, - {file = "cramjam-2.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:88754dd516f0e2f4dd242880b8e760dc854e917315a17fe3fc626475bea9b252"}, - {file = "cramjam-2.10.0-cp38-cp38-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:645827af834a64145ba4b06f703342b2dbe1d40d1a48fb04e82373bd95cf68e2"}, - {file = "cramjam-2.10.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:570c81f991033e624874475ade96b601f1db2c51b3e69c324072adcfb23ef5aa"}, - {file = "cramjam-2.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:06ad4a8b368d30ded1d932d9eed647962fbe44923269185a6bbd5e0d11cc39ab"}, - {file = "cramjam-2.10.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bcedda2ef2560e6e62cac03734ab1ad28616206b4d4f2d138440b4f43e18c395"}, - {file = "cramjam-2.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68362d87372a90b9717536238c81d74d7feb4a14392ac239ceb61c1c199a9bac"}, - {file = "cramjam-2.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ff7b95bd299c9360e7cb8d226002d58e2917f594ea5af0373efc713f896622b9"}, - {file = "cramjam-2.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2742eea6e336961167c5b6a2393fa04d54bdb10980f0d60ea36ed0a824e9a20"}, - {file = "cramjam-2.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8695857e0b0b5289fabb6c200b95e2b18d8575551ddd9d50746b3d78b6fb5aa8"}, - {file = "cramjam-2.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac5a8a3ef660e6869a7761cd0664223eb546b2d17e9121c8ab0ad46353635611"}, - {file = "cramjam-2.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d86c1e2006fe82a8679ed851c2462a6019b57255b3902d16ac35df4a37f6cdd"}, - {file = "cramjam-2.10.0-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:a094ca72440364bc1d0a793555875e515b0d7cc0eef171f4cd49c7e4855ba06e"}, - {file = "cramjam-2.10.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:05793857773ec62101edf2c0d22d8edc955707727124f637d2f6cc138e5f97aa"}, - {file = "cramjam-2.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b8dee2e4a402dac2df110e7b02fae49507a63b44b6fd91350cf069f31545a925"}, - {file = "cramjam-2.10.0-cp38-cp38-win32.whl", hash = "sha256:001fc2572adc655406fb899087f57a740e58a800b05acdccac8bf5759b617d90"}, - {file = "cramjam-2.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:9cadef44f5ad4c5b4d06ba3c28464d70241a40539c0343b1821ba43102b6a9fc"}, - {file = "cramjam-2.10.0-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:967f5f0f22bf5dba4e4d7abe9594b28f5da95606225a50555926ff6e975d84dd"}, - {file = "cramjam-2.10.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:260732e3b5c56d6182586f3a7fc5e3f3641b27bfbad5883e8d8e292af85a6870"}, - {file = "cramjam-2.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eafdc9d1721afcb4be9d20b980b61d404a592c19067197976a4077f52727bd1a"}, - {file = "cramjam-2.10.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28a13c0317e71121b2059ffa8beefa2b185be241c52f740f6eb261f0067186db"}, - {file = "cramjam-2.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3e0067ae3513e4cbd0efbabbe5a2bcfa2c2d4bddc67188eeb0751b9a02fdb7"}, - {file = "cramjam-2.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:112638a4cdf806509d2d2661cb519d239d731bd5fd2e95f211c48ac0f0deeab5"}, - {file = "cramjam-2.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ddbf6a3d3def7ae46638ebf87d7746ccebf22f885a87884ac24d97943af3f30"}, - {file = "cramjam-2.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2923b8cd2fcbd22e0842decb66bf925a9e95bda165490d037c355e5df8fef68"}, - {file = "cramjam-2.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab6f36c772109c974890eafff2a841ddbf38ea1293b01a778b28f26089a890d"}, - {file = "cramjam-2.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:17dda15edf256362edb30dcb1d5ecdcd727d946c6be0d1b130e736f3f49487dc"}, - {file = "cramjam-2.10.0-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:92fd6e784ade210c3522bc627b3938821d12fac52acefe4d6630460e243e28de"}, - {file = "cramjam-2.10.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a120fc0514c9ed9a4051d040ddd36176241d4f54c4a37d8e4f3d29ac9bdb4c3a"}, - {file = "cramjam-2.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a71ab695a16c6d5aeae1f02fcc37fbd1ae876e8fb339337aca187012a3d6c0a2"}, - {file = "cramjam-2.10.0-cp39-cp39-win32.whl", hash = 
"sha256:61b7f3c81e5e9015e73e5f423706b2f5e85a07ce79dea35645fad93505ff06cf"}, - {file = "cramjam-2.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:0d27fe3e316f9ae7fe1367b6daf0ffc993c1c66edae588165ac0f41f91a5a6b1"}, - {file = "cramjam-2.10.0-pp310-pypy310_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77192bc1a9897ecd91cf977a5d5f990373e35a8d028c9141c8c3d3680a4a4cd7"}, - {file = "cramjam-2.10.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:50b59e981f219d6840ac43cda8e885aff1457944ddbabaa16ac047690bfd6ad1"}, - {file = "cramjam-2.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d84581c869d279fab437182d5db2b590d44975084e8d50b164947f7aaa2c5f25"}, - {file = "cramjam-2.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04f54bea9ce39c440d1ac6901fe4d647f9218dd5cd8fe903c6fe9c42bf5e1f3b"}, - {file = "cramjam-2.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cddd12ee5a2ef4100478db7f5563a9cdb8bc0a067fbd8ccd1ecdc446d2e6a41a"}, - {file = "cramjam-2.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35bcecff38648908a4833928a892a1e7a32611171785bef27015107426bc1d9d"}, - {file = "cramjam-2.10.0-pp311-pypy311_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:1e826469cfbb6dcd5b967591e52855073267835229674cfa3d327088805855da"}, - {file = "cramjam-2.10.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1a200b74220dcd80c2bb99e3bfe1cdb1e4ed0f5c071959f4316abd65f9ef1e39"}, - {file = "cramjam-2.10.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2e419b65538786fc1f0cf776612262d4bf6c9449983d3fc0d0acfd86594fe551"}, - {file = "cramjam-2.10.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf1321a40da930edeff418d561dfb03e6d59d5b8ab5cbab1c4b03ff0aa4c6d21"}, - {file = "cramjam-2.10.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04376601c8f9714fb3a6a0a1699b85aab665d9d952a2a31fb37cf70e1be1fba"}, - {file = "cramjam-2.10.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2c1eb6e6c3d5c1cc3f7c7f8a52e034340a3c454641f019687fa94077c05da5c2"}, - {file = "cramjam-2.10.0.tar.gz", hash = "sha256:e821dd487384ae8004e977c3b13135ad6665ccf8c9874e68441cad1146e66d8a"}, +files = [ + {file = "cramjam-2.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8e82464d1e00fbbb12958999b8471ba5e9f3d9711954505a0a7b378762332e6f"}, + {file = "cramjam-2.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d2df8a6511cc08ef1fccd2e0c65e2ebc9f57574ec8376052a76851af5398810"}, + {file = "cramjam-2.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:21ea784e6c3f1843d3523ae0f03651dd06058b39eeb64beb82ee3b100fa83662"}, + {file = "cramjam-2.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e0c5d98a4e791f0bbd0ffcb7dae879baeb2dcc357348a8dc2be0a8c10403a2a"}, + {file = "cramjam-2.9.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e076fd87089197cb61117c63dbe7712ad5eccb93968860eb3bae09b767bac813"}, + {file = "cramjam-2.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d86b44933aea0151e4a2e1e6935448499849045c38167d288ca4c59d5b8cd4e"}, + {file = "cramjam-2.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7eb032549dec897b942ddcf80c1cdccbcb40629f15fc902731dbe6362da49326"}, + {file = 
"cramjam-2.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf29b4def86ec503e329fe138842a9b79a997e3beb6c7809b05665a0d291edff"}, + {file = "cramjam-2.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a36adf7d13b7accfa206e1c917f08924eb905b45aa8e62176509afa7b14db71e"}, + {file = "cramjam-2.9.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:cf4ea758d98b6fad1b4b2d808d0de690d3162ac56c26968aea0af6524e3eb736"}, + {file = "cramjam-2.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4826d6d81ea490fa7a3ae7a4b9729866a945ffac1f77fe57b71e49d6e1b21efd"}, + {file = "cramjam-2.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:335103317475bf992953c58838152a4761fc3c87354000edbfc4d7e57cf05909"}, + {file = "cramjam-2.9.1-cp310-cp310-win32.whl", hash = "sha256:258120cb1e3afc3443f756f9de161ed63eed56a2c31f6093e81c571c0f2dc9f6"}, + {file = "cramjam-2.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c60e5996aa02547d12bc2740d44e90e006b0f93100f53206f7abe6732ad56e69"}, + {file = "cramjam-2.9.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b9db1debe48060e41a5b91af9193c524e473c57f6105462c5524a41f5aabdb88"}, + {file = "cramjam-2.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f6f18f0242212d3409d26ce3874937b5b979cebd61f08b633a6ea893c32fc7b6"}, + {file = "cramjam-2.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b5b1cd7d39242b2b903cf09cd4696b3a6e04dc537ffa9f3ac8668edae76eecb6"}, + {file = "cramjam-2.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47de0a68f5f4d9951250ef5af31f2a7228132caa9ed60994234f7eb98090d33"}, + {file = "cramjam-2.9.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e13c9a697881e5e38148958612dc6856967f5ff8cd7bba5ff751f2d6ac020aa4"}, + {file = "cramjam-2.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba560244bc1335b420b74e91e35f9d4e7f307a3be3a4603ce0f0d7e15a0acdf0"}, + {file = "cramjam-2.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d47fd41ce260cf4f0ff0e788de961fab9e9c6844a05ce55d06ce31e06107bdc"}, + {file = "cramjam-2.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84d154fbadece82935396eb6bcb502085d944d2fd13b07a94348364344370c2c"}, + {file = "cramjam-2.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:038df668ffb94d64d67b6ecc59cbd206745a425ffc0402897dde12d89fa6a870"}, + {file = "cramjam-2.9.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:4125d8cd86fa08495d310e80926c2f0563f157b76862e7479f9b2cf94823ea0c"}, + {file = "cramjam-2.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4206ebdd1d1ef0f3f86c8c2f7c426aa4af6094f4f41e274601fd4c4569f37454"}, + {file = "cramjam-2.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab687bef5c493732b9a4ab870542ee43f5eae0025f9c684c7cb399c3a85cb380"}, + {file = "cramjam-2.9.1-cp311-cp311-win32.whl", hash = "sha256:dda7698b6d7caeae1047adafebc4b43b2a82478234f6c2b45bc3edad854e0600"}, + {file = "cramjam-2.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:872b00ff83e84bcbdc7e951af291ebe65eed20b09c47e7c4af21c312f90b796f"}, + {file = "cramjam-2.9.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:79417957972553502b217a0093532e48893c8b4ca30ccc941cefe9c72379df7c"}, + {file = "cramjam-2.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce2b94117f373defc876f88e74e44049a9969223dbca3240415b71752d0422fb"}, + {file = "cramjam-2.9.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:67040e0fd84404885ec716a806bee6110f9960c3647e0ef1670aab3b7375a70a"}, + {file = "cramjam-2.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bedb84e068b53c944bd08dcb501fd00d67daa8a917922356dd559b484ce7eab"}, + {file = "cramjam-2.9.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:06e3f97a379386d97debf08638a78b3d3850fdf6124755eb270b54905a169930"}, + {file = "cramjam-2.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11118675e9c7952ececabc62f023290ee4f8ecf0bee0d2c7eb8d1c402ee9769d"}, + {file = "cramjam-2.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b7de6b61b11545570e4d6033713f3599525efc615ee353a822be8f6b0c65b77"}, + {file = "cramjam-2.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57ca8f3775324a9de3ee6f05ca172687ba258c0dea79f7e3a6b4112834982f2a"}, + {file = "cramjam-2.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9847dd6f288f1c56359f52acb48ff2df848ff3e3bff34d23855bbcf7016427cc"}, + {file = "cramjam-2.9.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:8d1248dfa7f151e893ce819670f00879e4b7650b8d4c01279ce4f12140d68dd2"}, + {file = "cramjam-2.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9da6d970281083bae91b914362de325414aa03c01fc806f6bb2cc006322ec834"}, + {file = "cramjam-2.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c33bc095db5733c841a102b8693062be5db8cdac17b9782ebc00577c6a94480"}, + {file = "cramjam-2.9.1-cp312-cp312-win32.whl", hash = "sha256:9e9193cd4bb57e7acd3af24891526299244bfed88168945efdaa09af4e50720f"}, + {file = "cramjam-2.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:15955dd75e80f66c1ea271167a5347661d9bdc365f894a57698c383c9b7d465c"}, + {file = "cramjam-2.9.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5a7797a2fff994fc5e323f7a967a35a3e37e3006ed21d64dcded086502f482af"}, + {file = "cramjam-2.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d51b9b140b1df39a44bff7896d98a10da345b7d5f5ce92368d328c1c2c829167"}, + {file = "cramjam-2.9.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:07ac76b7f992556e7aa910244be11ece578cdf84f4d5d5297461f9a895e18312"}, + {file = "cramjam-2.9.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d90a72608c7550cd7eba914668f6277bfb0b24f074d1f1bd9d061fcb6f2adbd6"}, + {file = "cramjam-2.9.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:56495975401b1821dbe1f29cf222e23556232209a2fdb809fe8156d120ca9c7f"}, + {file = "cramjam-2.9.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b695259e71fde6d5be66b77a4474523ced9ffe9fe8a34cb9b520ec1241a14d3"}, + {file = "cramjam-2.9.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab1e69dc4831bbb79b6d547077aae89074c83e8ad94eba1a3d80e94d2424fd02"}, + {file = "cramjam-2.9.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440b489902bfb7a26d3fec1ca888007615336ff763d2a32a2fc40586548a0dbf"}, + {file = "cramjam-2.9.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:217fe22b41f8c3dce03852f828b059abfad11d1344a1df2f43d3eb8634b18d75"}, + {file = "cramjam-2.9.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:95f3646ddc98af25af25d5692ae65966488a283813336ea9cf41b22e542e7c0d"}, + {file = "cramjam-2.9.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:6b19fc60ead1cae9795a5b359599da3a1c95d38f869bdfb51c441fd76b04e926"}, + {file = 
"cramjam-2.9.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8dc5207567459d049696f62a1fdfb220f3fe6aa0d722285d44753e12504dac6c"}, + {file = "cramjam-2.9.1-cp313-cp313-win32.whl", hash = "sha256:fbfe35929a61b914de9e5dbacde0cfbba86cbf5122f9285a24c14ed0b645490b"}, + {file = "cramjam-2.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:06068bd191a82ad4fc1ac23d6f8627fb5e37ec4be0431711b9a2dbacaccfeddb"}, + {file = "cramjam-2.9.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a2ca4d3c683d28d3217821029eb08d3487d5043d7eb455df11ff3cacfd4c916"}, + {file = "cramjam-2.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:008b49b455b396acc5459dfb06fb9d56049c4097ee8e590892a4d3da9a711da3"}, + {file = "cramjam-2.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45c18cc13156e8697a8d3f9e57e49a69b00e14a103196efab0893fae1a5257f8"}, + {file = "cramjam-2.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d14a0efb21e0fec0631bcd66040b06e6a0fe10825f3aacffded38c1c978bdff9"}, + {file = "cramjam-2.9.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f815fb0eba625af45139af4f90f5fc2ddda61b171c2cc3ab63d44b40c5c7768"}, + {file = "cramjam-2.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04828cbfad7384f06a4a7d0d927c3e85ef11dc5a40b9cf5f3e29ac4e23ecd678"}, + {file = "cramjam-2.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0944a7c3a78f940c06d1b29bdce91a17798d80593dd01ebfeb842761e48a8b5"}, + {file = "cramjam-2.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec769e5b16251704502277a1163dcf2611551452d7590ff4cc422b7b0367fc96"}, + {file = "cramjam-2.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ba79c7d2cc5adb897b690c05dd9b67c4d401736d207314b99315f7be3cd94fd"}, + {file = "cramjam-2.9.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d35923fb5411bde30b53c0696dff8e24c8a38b010b89544834c53f4462fd71df"}, + {file = "cramjam-2.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:da0cc0efdbfb8ee2361f89f38ded03d11678f37e392afff7a97b09c55dadfc83"}, + {file = "cramjam-2.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f89924858712b8b936f04f3d690e72825a3e5127a140b434c79030c1c5a887ce"}, + {file = "cramjam-2.9.1-cp38-cp38-win32.whl", hash = "sha256:5925a738b8478f223ab9756fc794e3cabd5917fd7846f66adcf1d5fc2bf9864c"}, + {file = "cramjam-2.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:b7ac273498a2c6772d67707e101b74014c0d9413bb4711c51d8ec311de59b4b1"}, + {file = "cramjam-2.9.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:af39006faddfc6253beb93ca821d544931cfee7f0177b99ff106dfd8fd6a2cd8"}, + {file = "cramjam-2.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b3291be0d3f73d5774d69013be4ab33978c777363b5312d14f62f77817c2f75a"}, + {file = "cramjam-2.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1539fd758f0e57fad7913cebff8baaee871bb561ddf6fa710a427b74da6b6778"}, + {file = "cramjam-2.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff362f68bd68ac0eccb445209238d589bba728fb6d7f2e9dc199e0ec3a61d6e0"}, + {file = "cramjam-2.9.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23b9786d1d17686fb8d600ade2a19374c7188d4b8867efa9af0d8274a220aec7"}, + {file = "cramjam-2.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bc9c2c748aaf91863d89c4583f529c1c709485c94f8dfeb3ee48662d88e3258"}, + {file = 
"cramjam-2.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd0fa9a0e7f18224b6d2d1d69dbdc3aecec80ef1393c59244159b131604a4395"}, + {file = "cramjam-2.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ceef6e09ee22457997370882aa3c69de01e6dd0aaa2f953e1e87ad11641d042"}, + {file = "cramjam-2.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1376f6fdbf0b30712413a0b4e51663a4938ae2f6b449f8e4635dbb3694db83cf"}, + {file = "cramjam-2.9.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:342fb946f8d3e9e35b837288b03ab23cfbe0bb5a30e582ed805ef79706823a96"}, + {file = "cramjam-2.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a237064a6e2c2256c9a1cf2beb7c971382190c0f1eb2e810e02e971881756132"}, + {file = "cramjam-2.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53145fc9f2319c1245d4329e1da8cfacd6e35e27090c07c0b9d453ae2bbdac3e"}, + {file = "cramjam-2.9.1-cp39-cp39-win32.whl", hash = "sha256:8a9f52c27292c21457f43c4ce124939302a9acfb62295e7cda8667310563a5a3"}, + {file = "cramjam-2.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:8097ee39b61c86848a443c0b25b2df1de6b331fd512b20836a4f5cfde51ab255"}, + {file = "cramjam-2.9.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:86824c695688fcd06c5ac9bbd3fea9bdfb4cca194b1e706fbf11a629df48d2b4"}, + {file = "cramjam-2.9.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:27571bfa5a5d618604696747d0dc1d2a99b5906c967c8dee53c13a7107edfde6"}, + {file = "cramjam-2.9.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb01f6e38719818778144d3165a89ea1ad9dc58c6342b7f20aa194c70f34cbd1"}, + {file = "cramjam-2.9.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b5cef5cf40725fe64592af9ec163e7389855077700678a1d94bec549403a74d"}, + {file = "cramjam-2.9.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ac48b978aa0675f62b642750e798c394a64d25ce852e4e541f69bef9a564c2f0"}, + {file = "cramjam-2.9.1.tar.gz", hash = "sha256:336cc591d86cbd225d256813779f46624f857bc9c779db126271eff9ddc524ae"}, ] [package.extras] -dev = ["black (==22.3.0)", "hypothesis (<6.123.0)", "numpy", "pytest (>=5.30)", "pytest-benchmark", "pytest-xdist"] +dev = ["black (==22.3.0)", "hypothesis", "numpy", "pytest (>=5.30)", "pytest-benchmark", "pytest-xdist"] [[package]] name = "cryptography" -version = "45.0.5" +version = "44.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" -groups = ["main", "dev"] -files = [ - {file = "cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27"}, - {file = "cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e"}, - {file = "cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174"}, - {file = "cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9"}, - {file = "cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63"}, - {file = "cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492"}, - {file = "cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0"}, - {file = "cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a"}, - {file = "cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f"}, - {file = "cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:206210d03c1193f4e1ff681d22885181d47efa1ab3018766a7b32a7b3d6e6afd"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c648025b6840fe62e57107e0a25f604db740e728bd67da4f6f060f03017d5097"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b8fa8b0a35a9982a3c60ec79905ba5bb090fc0b9addcfd3dc2dd04267e45f25e"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:14d96584701a887763384f3c47f0ca7c1cce322aa1c31172680eb596b890ec30"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57c816dfbd1659a367831baca4b775b2a5b43c003daf52e9d57e1d30bc2e1b0e"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b9e38e0a83cd51e07f5a48ff9691cae95a79bea28fe4ded168a8e5c6c77e819d"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f"}, - {file = "cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a"}, -] -markers = {main = "extra == \"adlfs\""} +files = [ + {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0"}, + {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf"}, + {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864"}, + {file = "cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a"}, + {file = "cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00"}, + {file = "cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41"}, + {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b"}, + {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7"}, + {file = "cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9"}, + {file = "cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"}, + {file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"}, +] [package.dependencies] -cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""} +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""] -pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==45.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] name = "cython" -version = "3.1.2" +version = "3.0.12" description = "The Cython compiler for writing C extensions in the Python language." optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "cython-3.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0f2add8b23cb19da3f546a688cd8f9e0bfc2776715ebf5e283bc3113b03ff008"}, - {file = "cython-3.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0d6248a2ae155ca4c42d7fa6a9a05154d62e695d7736bc17e1b85da6dcc361df"}, - {file = "cython-3.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:262bf49d9da64e2a34c86cbf8de4aa37daffb0f602396f116cca1ed47dc4b9f2"}, - {file = "cython-3.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae53ae93c699d5f113953a9869df2fc269d8e173f9aa0616c6d8d6e12b4e9827"}, - {file = "cython-3.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b417c5d046ce676ee595ec7955ed47a68ad6f419cbf8c2a8708e55a3b38dfa35"}, - {file = "cython-3.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:af127da4b956e0e906e552fad838dc3fb6b6384164070ceebb0d90982a8ae25a"}, - {file = "cython-3.1.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9be3d4954b46fd0f2dceac011d470f658eaf819132db52fbd1cf226ee60348db"}, - {file = "cython-3.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:63da49672c4bb022b4de9d37bab6c29953dbf5a31a2f40dffd0cf0915dcd7a17"}, - {file = "cython-3.1.2-cp310-cp310-win32.whl", hash = "sha256:2d8291dbbc1cb86b8d60c86fe9cbf99ec72de28cb157cbe869c95df4d32efa96"}, - {file = "cython-3.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:e1f30a1339e03c80968a371ef76bf27a6648c5646cccd14a97e731b6957db97a"}, - {file = "cython-3.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5548573e0912d7dc80579827493315384c462e2f15797b91a8ed177686d31eb9"}, - {file = "cython-3.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bf3ea5bc50d80762c490f42846820a868a6406fdb5878ae9e4cc2f11b50228a"}, - 
{file = "cython-3.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ce53951d06ab2bca39f153d9c5add1d631c2a44d58bf67288c9d631be9724e"}, - {file = "cython-3.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e05a36224e3002d48c7c1c695b3771343bd16bc57eab60d6c5d5e08f3cbbafd8"}, - {file = "cython-3.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc0fc0777c7ab82297c01c61a1161093a22a41714f62e8c35188a309bd5db8e"}, - {file = "cython-3.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:18161ef3dd0e90a944daa2be468dd27696712a5f792d6289e97d2a31298ad688"}, - {file = "cython-3.1.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ca45020950cd52d82189d6dfb6225737586be6fe7b0b9d3fadd7daca62eff531"}, - {file = "cython-3.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aaae97d6d07610224be2b73a93e9e3dd85c09aedfd8e47054e3ef5a863387dae"}, - {file = "cython-3.1.2-cp311-cp311-win32.whl", hash = "sha256:3d439d9b19e7e70f6ff745602906d282a853dd5219d8e7abbf355de680c9d120"}, - {file = "cython-3.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:8efa44ee2f1876e40eb5e45f6513a19758077c56bf140623ccab43d31f873b61"}, - {file = "cython-3.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c2c4b6f9a941c857b40168b3f3c81d514e509d985c2dcd12e1a4fea9734192e"}, - {file = "cython-3.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bdbc115bbe1b8c1dcbcd1b03748ea87fa967eb8dfc3a1a9bb243d4a382efcff4"}, - {file = "cython-3.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05111f89db1ca98edc0675cfaa62be47b3ff519a29876eb095532a9f9e052b8"}, - {file = "cython-3.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e7188df8709be32cfdfadc7c3782e361c929df9132f95e1bbc90a340dca3c7"}, - {file = "cython-3.1.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c0ecc71e60a051732c2607b8eb8f2a03a5dac09b28e52b8af323c329db9987b"}, - {file = "cython-3.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f27143cf88835c8bcc9bf3304953f23f377d1d991e8942982fe7be344c7cfce3"}, - {file = "cython-3.1.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d8c43566701133f53bf13485839d8f3f309095fe0d3b9d0cd5873073394d2edc"}, - {file = "cython-3.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a3bb893e85f027a929c1764bb14db4c31cbdf8a96f59a78f608f2ba7cfbbce95"}, - {file = "cython-3.1.2-cp312-cp312-win32.whl", hash = "sha256:12c5902f105e43ca9af7874cdf87a23627f98c15d5a4f6d38bc9d334845145c0"}, - {file = "cython-3.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:06789eb7bd2e55b38b9dd349e9309f794aee0fed99c26ea5c9562d463877763f"}, - {file = "cython-3.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc22e5f18af436c894b90c257130346930fdc860d7f42b924548c591672beeef"}, - {file = "cython-3.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42c7bffb0fe9898996c7eef9eb74ce3654553c7a3a3f3da66e5a49f801904ce0"}, - {file = "cython-3.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88dc7fd54bfae78c366c6106a759f389000ea4dfe8ed9568af9d2f612825a164"}, - {file = "cython-3.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80d0ce057672ca50728153757d022842d5dcec536b50c79615a22dda2a874ea0"}, - {file = "cython-3.1.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:eda6a43f1b78eae0d841698916eef661d15f8bc8439c266a964ea4c504f05612"}, - {file = "cython-3.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b4c516d103e87c2e9c1ab85227e4d91c7484c1ba29e25f8afbf67bae93fee164"}, - {file = "cython-3.1.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7542f1d18ab2cd22debc72974ec9e53437a20623d47d6001466e430538d7df54"}, - {file = "cython-3.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:63335513c06dcec4ecdaa8598f36c969032149ffd92a461f641ee363dc83c7ad"}, - {file = "cython-3.1.2-cp313-cp313-win32.whl", hash = "sha256:b377d542299332bfeb61ec09c57821b10f1597304394ba76544f4d07780a16df"}, - {file = "cython-3.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:8ab1319c77f15b0ae04b3fb03588df3afdec4cf79e90eeea5c961e0ebd8fdf72"}, - {file = "cython-3.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dbc1f225cb9f9be7a025589463507e10bb2d76a3258f8d308e0e2d0b966c556e"}, - {file = "cython-3.1.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c1661c1701c96e1866f839e238570c96a97535a81da76a26f45f99ede18b3897"}, - {file = "cython-3.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955bc6032d89ce380458266e65dcf5ae0ed1e7c03a7a4457e3e4773e90ba7373"}, - {file = "cython-3.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b58e859889dd0fc6c3a990445b930f692948b28328bb4f3ed84b51028b7e183"}, - {file = "cython-3.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:992a6504aa3eed50dd1fc3d1fa998928b08c1188130bd526e177b6d7f3383ec4"}, - {file = "cython-3.1.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f3d03077938b02ec47a56aa156da7bfc2379193738397d4e88086db5b0a374e0"}, - {file = "cython-3.1.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b7e1d3c383a5f4ca5319248b9cb1b16a04fb36e153d651e558897171b7dbabb9"}, - {file = "cython-3.1.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:58d4d45e40cadf4f602d96b7016cf24ccfe4d954c61fa30b79813db8ccb7818f"}, - {file = "cython-3.1.2-cp38-cp38-win32.whl", hash = "sha256:919ff38a93f7c21829a519693b336979feb41a0f7ca35969402d7e211706100e"}, - {file = "cython-3.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:aca994519645ba8fb5e99c0f9d4be28d61435775552aaf893a158c583cd218a5"}, - {file = "cython-3.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe7f1ee4c13f8a773bd6c66b3d25879f40596faeab49f97d28c39b16ace5fff9"}, - {file = "cython-3.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9ec7d2baea122d94790624f743ff5b78f4e777bf969384be65b69d92fa4bc3f"}, - {file = "cython-3.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df57827185874f29240b02402e615547ab995d90182a852c6ec4f91bbae355a4"}, - {file = "cython-3.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1a69b9b4fe0a48a8271027c0703c71ab1993c4caca01791c0fd2e2bd9031aa"}, - {file = "cython-3.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:970cc1558519f0f108c3e2f4b3480de4945228d9292612d5b2bb687e36c646b8"}, - {file = "cython-3.1.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:604c39cd6d152498a940aeae28b6fd44481a255a3fdf1b0051c30f3873c88b7f"}, - {file = "cython-3.1.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:855f2ae06438c7405997cf0df42d5b508ec3248272bb39df4a7a4a82a5f7c8cb"}, - {file = "cython-3.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9e3016ca7a86728bfcbdd52449521e859a977451f296a7ae4967cefa2ec498f7"}, - {file = 
"cython-3.1.2-cp39-cp39-win32.whl", hash = "sha256:4896fc2b0f90820ea6fcf79a07e30822f84630a404d4e075784124262f6d0adf"}, - {file = "cython-3.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:a965b81eb4f5a5f3f6760b162cb4de3907c71a9ba25d74de1ad7a0e4856f0412"}, - {file = "cython-3.1.2-py3-none-any.whl", hash = "sha256:d23fd7ffd7457205f08571a42b108a3cf993e83a59fe4d72b42e6fc592cf2639"}, - {file = "cython-3.1.2.tar.gz", hash = "sha256:6bbf7a953fa6762dfecdec015e3b054ba51c0121a45ad851fa130f63f5331381"}, -] - -[[package]] -name = "daft" -version = "0.5.18" -description = "Distributed Dataframes for Multimodal Data" -optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"daft\"" -files = [ - {file = "daft-0.5.18-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:13d52a0416b178ef860e66a0bc437b9ca386e9918846e94d759fc785c9a32d8f"}, - {file = "daft-0.5.18-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:f4e27c1b3c4b85bcf4e9bbae1de3165680ca7b8d47dd3ed78fc93d116d53be21"}, - {file = "daft-0.5.18-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:f948990300f3d88ea505af49a2dc70d60720d510c5f28a05f88469ed6969efcd"}, - {file = "daft-0.5.18-cp39-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:7b6d78fc45af586dcbcbe2734b4fb628d47604b56f1a0e9be606fa63166a0ce4"}, - {file = "daft-0.5.18-cp39-abi3-win_amd64.whl", hash = "sha256:46657067776be7058b59b870faed3468e842f07a1dfb1901485edbcf7ac68cf3"}, - {file = "daft-0.5.18.tar.gz", hash = "sha256:c903847583700dd98f37a914029f88ed7915e3aa65f2b97fea832293e5814306"}, +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "Cython-3.0.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba67eee9413b66dd9fbacd33f0bc2e028a2a120991d77b5fd4b19d0b1e4039b9"}, + {file = "Cython-3.0.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee2717e5b5f7d966d0c6e27d2efe3698c357aa4d61bb3201997c7a4f9fe485a"}, + {file = "Cython-3.0.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cffc3464f641c8d0dda942c7c53015291beea11ec4d32421bed2f13b386b819"}, + {file = "Cython-3.0.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d3a8f81980ffbd74e52f9186d8f1654e347d0c44bfea6b5997028977f481a179"}, + {file = "Cython-3.0.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8d32856716c369d01f2385ad9177cdd1a11079ac89ea0932dc4882de1aa19174"}, + {file = "Cython-3.0.12-cp310-cp310-win32.whl", hash = "sha256:712c3f31adec140dc60d064a7f84741f50e2c25a8edd7ae746d5eb4d3ef7072a"}, + {file = "Cython-3.0.12-cp310-cp310-win_amd64.whl", hash = "sha256:d6945694c5b9170cfbd5f2c0d00ef7487a2de7aba83713a64ee4ebce7fad9e05"}, + {file = "Cython-3.0.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:feb86122a823937cc06e4c029d80ff69f082ebb0b959ab52a5af6cdd271c5dc3"}, + {file = "Cython-3.0.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfdbea486e702c328338314adb8e80f5f9741f06a0ae83aaec7463bc166d12e8"}, + {file = "Cython-3.0.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563de1728c8e48869d2380a1b76bbc1b1b1d01aba948480d68c1d05e52d20c92"}, + {file = "Cython-3.0.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:398d4576c1e1f6316282aa0b4a55139254fbed965cba7813e6d9900d3092b128"}, + {file = "Cython-3.0.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1e5eadef80143026944ea8f9904715a008f5108d1d644a89f63094cc37351e73"}, + {file = 
"Cython-3.0.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5a93cbda00a5451175b97dea5a9440a3fcee9e54b4cba7a7dbcba9a764b22aec"}, + {file = "Cython-3.0.12-cp311-cp311-win32.whl", hash = "sha256:3109e1d44425a2639e9a677b66cd7711721a5b606b65867cb2d8ef7a97e2237b"}, + {file = "Cython-3.0.12-cp311-cp311-win_amd64.whl", hash = "sha256:d4b70fc339adba1e2111b074ee6119fe9fd6072c957d8597bce9a0dd1c3c6784"}, + {file = "Cython-3.0.12-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fe030d4a00afb2844f5f70896b7f2a1a0d7da09bf3aa3d884cbe5f73fff5d310"}, + {file = "Cython-3.0.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7fec4f052b8fe173fe70eae75091389955b9a23d5cec3d576d21c5913b49d47"}, + {file = "Cython-3.0.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0faa5e39e5c8cdf6f9c3b1c3f24972826e45911e7f5b99cf99453fca5432f45e"}, + {file = "Cython-3.0.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d53de996ed340e9ab0fc85a88aaa8932f2591a2746e1ab1c06e262bd4ec4be7"}, + {file = "Cython-3.0.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ea3a0e19ab77266c738aa110684a753a04da4e709472cadeff487133354d6ab8"}, + {file = "Cython-3.0.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c151082884be468f2f405645858a857298ac7f7592729e5b54788b5c572717ba"}, + {file = "Cython-3.0.12-cp312-cp312-win32.whl", hash = "sha256:3083465749911ac3b2ce001b6bf17f404ac9dd35d8b08469d19dc7e717f5877a"}, + {file = "Cython-3.0.12-cp312-cp312-win_amd64.whl", hash = "sha256:c0b91c7ebace030dd558ea28730de8c580680b50768e5af66db2904a3716c3e3"}, + {file = "Cython-3.0.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4ee6f1ea1bead8e6cbc4e64571505b5d8dbdb3b58e679d31f3a84160cebf1a1a"}, + {file = "Cython-3.0.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57aefa6d3341109e46ec1a13e3a763aaa2cbeb14e82af2485b318194be1d9170"}, + {file = "Cython-3.0.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:879ae9023958d63c0675015369384642d0afb9c9d1f3473df9186c42f7a9d265"}, + {file = "Cython-3.0.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36fcd584dae547de6f095500a380f4a0cce72b7a7e409e9ff03cb9beed6ac7a1"}, + {file = "Cython-3.0.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62b79dcc0de49efe9e84b9d0e2ae0a6fc9b14691a65565da727aa2e2e63c6a28"}, + {file = "Cython-3.0.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4aa255781b093a8401109d8f2104bbb2e52de7639d5896aefafddc85c30e0894"}, + {file = "Cython-3.0.12-cp313-cp313-win32.whl", hash = "sha256:77d48f2d4bab9fe1236eb753d18f03e8b2619af5b6f05d51df0532a92dfb38ab"}, + {file = "Cython-3.0.12-cp313-cp313-win_amd64.whl", hash = "sha256:86c304b20bd57c727c7357e90d5ba1a2b6f1c45492de2373814d7745ef2e63b4"}, + {file = "Cython-3.0.12-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ff5c0b6a65b08117d0534941d404833d516dac422eee88c6b4fd55feb409a5ed"}, + {file = "Cython-3.0.12-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:680f1d6ed4436ae94805db264d6155ed076d2835d84f20dcb31a7a3ad7f8668c"}, + {file = "Cython-3.0.12-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc24609613fa06d0d896309f7164ba168f7e8d71c1e490ed2a08d23351c3f41"}, + {file = "Cython-3.0.12-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c1879c073e2b34924ce9b7ca64c212705dcc416af4337c45f371242b2e5f6d32"}, + {file = "Cython-3.0.12-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:bfb75123dd4ff767baa37d7036da0de2dfb6781ff256eef69b11b88b9a0691d1"}, + {file = "Cython-3.0.12-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:f39640f8df0400cde6882e23c734f15bb8196de0a008ae5dc6c8d1ec5957d7c8"}, + {file = "Cython-3.0.12-cp36-cp36m-win32.whl", hash = "sha256:8c9efe9a0895abee3cadfdad4130b30f7b5e57f6e6a51ef2a44f9fc66a913880"}, + {file = "Cython-3.0.12-cp36-cp36m-win_amd64.whl", hash = "sha256:63d840f2975e44d74512f8f34f1f7cb8121c9428e26a3f6116ff273deb5e60a2"}, + {file = "Cython-3.0.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:75c5acd40b97cff16fadcf6901a91586cbca5dcdba81f738efaf1f4c6bc8dccb"}, + {file = "Cython-3.0.12-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e62564457851db1c40399bd95a5346b9bb99e17a819bf583b362f418d8f3457a"}, + {file = "Cython-3.0.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ccd1228cc203b1f1b8a3d403f5a20ad1c40e5879b3fbf5851ce09d948982f2c"}, + {file = "Cython-3.0.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25529ee948f44d9a165ff960c49d4903267c20b5edf2df79b45924802e4cca6e"}, + {file = "Cython-3.0.12-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:90cf599372c5a22120609f7d3a963f17814799335d56dd0dcf8fe615980a8ae1"}, + {file = "Cython-3.0.12-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9f8c48748a9c94ea5d59c26ab49ad0fad514d36f894985879cf3c3ca0e600bf4"}, + {file = "Cython-3.0.12-cp37-cp37m-win32.whl", hash = "sha256:3e4fa855d98bc7bd6a2049e0c7dc0dcf595e2e7f571a26e808f3efd84d2db374"}, + {file = "Cython-3.0.12-cp37-cp37m-win_amd64.whl", hash = "sha256:120681093772bf3600caddb296a65b352a0d3556e962b9b147efcfb8e8c9801b"}, + {file = "Cython-3.0.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:731d719423e041242c9303c80cae4327467299b90ffe62d4cc407e11e9ea3160"}, + {file = "Cython-3.0.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3238a29f37999e27494d120983eca90d14896b2887a0bd858a381204549137a"}, + {file = "Cython-3.0.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b588c0a089a9f4dd316d2f9275230bad4a7271e5af04e1dc41d2707c816be44b"}, + {file = "Cython-3.0.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ab9f5198af74eb16502cc143cdde9ca1cbbf66ea2912e67440dd18a36e3b5fa"}, + {file = "Cython-3.0.12-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8ee841c0e114efa1e849c281ac9b8df8aa189af10b4a103b1c5fd71cbb799679"}, + {file = "Cython-3.0.12-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:43c48b5789398b228ea97499f5b864843ba9b1ab837562a9227c6f58d16ede8b"}, + {file = "Cython-3.0.12-cp38-cp38-win32.whl", hash = "sha256:5e5f17c48a4f41557fbcc7ee660ccfebe4536a34c557f553b6893c1b3c83df2d"}, + {file = "Cython-3.0.12-cp38-cp38-win_amd64.whl", hash = "sha256:309c081057930bb79dc9ea3061a1af5086c679c968206e9c9c2ec90ab7cb471a"}, + {file = "Cython-3.0.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54115fcc126840926ff3b53cfd2152eae17b3522ae7f74888f8a41413bd32f25"}, + {file = "Cython-3.0.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:629db614b9c364596d7c975fa3fb3978e8c5349524353dbe11429896a783fc1e"}, + {file = "Cython-3.0.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:af081838b0f9e12a83ec4c3809a00a64c817f489f7c512b0e3ecaf5f90a2a816"}, + {file = "Cython-3.0.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:34ce459808f7d8d5d4007bc5486fe50532529096b43957af6cbffcb4d9cc5c8d"}, + {file = "Cython-3.0.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6c6cd6a75c8393e6805d17f7126b96a894f310a1a9ea91c47d141fb9341bfa8"}, + {file = "Cython-3.0.12-cp39-cp39-win32.whl", hash = "sha256:a4032e48d4734d2df68235d21920c715c451ac9de15fa14c71b378e8986b83be"}, + {file = "Cython-3.0.12-cp39-cp39-win_amd64.whl", hash = "sha256:dcdc3e5d4ce0e7a4af6903ed580833015641e968d18d528d8371e2435a34132c"}, + {file = "Cython-3.0.12-py2.py3-none-any.whl", hash = "sha256:0038c9bae46c459669390e53a1ec115f8096b2e4647ae007ff1bf4e6dee92806"}, + {file = "cython-3.0.12.tar.gz", hash = "sha256:b988bb297ce76c671e28c97d017b95411010f7c77fa6623dd0bb47eed1aee1bc"}, ] -[package.dependencies] -fsspec = "*" -pyarrow = ">=8.0.0" -tqdm = "*" -typing-extensions = {version = ">=4.0.0", markers = "python_full_version < \"3.10.0\""} - -[package.extras] -all = ["daft[aws,azure,deltalake,gcp,iceberg,numpy,pandas,ray,spark,sql,unity]"] -aws = ["boto3"] -deltalake = ["deltalake", "packaging"] -hudi = ["pyarrow (>=8.0.0)"] -iceberg = ["packaging", "pyiceberg (>=0.7.0)"] -lance = ["pylance"] -numpy = ["numpy"] -pandas = ["pandas"] -ray = ["packaging", "ray[client,data] (>=2.0.0) ; platform_system != \"Windows\"", "ray[client,data] (>=2.10.0) ; platform_system == \"Windows\""] -spark = ["googleapis-common-protos (==1.56.4)", "grpcio (>=1.48,<1.57)", "grpcio-status (>=1.48,<1.57)", "numpy (>=1.15)", "pandas (>=1.0.5)", "py4j (>=0.10.9.7)", "pyspark (==3.5.5)"] -sql = ["connectorx", "sqlalchemy", "sqlglot"] -unity = ["httpx (<=0.27.2)", "unitycatalog"] - [[package]] name = "datafusion" -version = "48.0.0" +version = "44.0.0" description = "Build and run queries against data" -optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"datafusion\"" +optional = false +python-versions = ">=3.8" files = [ - {file = "datafusion-48.0.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:24984e3c4077caca7b3746bdcf6d67171c4976325d035970b97bf59d49327c5b"}, - {file = "datafusion-48.0.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:31e841d02147b0904984850421ae18499d4ab2492ff1ef4dd9d15d3cba3fbef3"}, - {file = "datafusion-48.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6b1ed4552c496b961d648d2cbbb6a43aaae3c6442acebc795a4ef256f549cd4"}, - {file = "datafusion-48.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3d316dc339c0231588ac3f4139af490c556912c54c4508c443e3466c81ff457b"}, - {file = "datafusion-48.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:3d75026f93083febef2e8b362f56e19cfbd5d8058c61c3847f04e786697fc4bd"}, - {file = "datafusion-48.0.0.tar.gz", hash = "sha256:fcb89124db22a43e00bf5a1a4542157155d83d69589677c5309f106e83156a32"}, + {file = "datafusion-44.0.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:4786f0a09c6b422ac18c6ea095650c14454be5af3df880b5c169688f610ab41a"}, + {file = "datafusion-44.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:bbad11b33c424a658edbc52db39dfe4ddc30339ffac7c43cdc1aa128c260ae76"}, + {file = "datafusion-44.0.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ca3b47fd34e1c96cf6d40a877245afd36f3ccf8b39dda1e5b6f811f273af781"}, + {file = "datafusion-44.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:22d2e3ecf5d0b1b75c8ad48c8d9af14a0ac4de1633e86d3b397614f68aa8123c"}, + {file = 
"datafusion-44.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:b36774dca54a0e1c88c8080b8c72cc2df5e95f4340a0cdbdd18a0473401551c5"}, + {file = "datafusion-44.0.0.tar.gz", hash = "sha256:5fc3740406ff531527aa8baa5954fe0bf1f02ea72170e172746b38cffc0d8d50"}, ] [package.dependencies] @@ -1294,41 +1066,38 @@ typing-extensions = {version = "*", markers = "python_version < \"3.13\""} [[package]] name = "decorator" -version = "5.2.1" +version = "5.1.1" description = "Decorators for Humans" optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"gcsfs\"" +python-versions = ">=3.5" files = [ - {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, - {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] [[package]] name = "deptry" -version = "0.23.1" +version = "0.23.0" description = "A command line utility to check for unused, missing and transitive dependencies in a Python project." optional = false python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "deptry-0.23.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f0b231d098fb5b48d8973c9f192c353ffdd395770063424969fa7f15ddfea7d8"}, - {file = "deptry-0.23.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:bf057f514bb2fa18a2b192a7f7372bd14577ff46b11486933e8383dfef461983"}, - {file = "deptry-0.23.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ee3f5663bb1c048e2aaf25a4d9e6d09cc1f3b3396ee248980878c6a6c9c0e21"}, - {file = "deptry-0.23.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae0366dc5f50a5fb29cf90de1110c5e368513de6c1b2dac439f2817f3f752616"}, - {file = "deptry-0.23.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ab156a90a9eda5819aeb1c1da585dd4d5ec509029399a38771a49e78f40db90f"}, - {file = "deptry-0.23.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:651c7eb168233755152fcc468713c024d64a03069645187edb4a17ba61ce6133"}, - {file = "deptry-0.23.1-cp39-abi3-win_amd64.whl", hash = "sha256:8da1e8f70e7086ebc228f3a4a3cfb5aa127b09b5eef60d694503d6bb79809025"}, - {file = "deptry-0.23.1-cp39-abi3-win_arm64.whl", hash = "sha256:f589497a5809717db4dcf2aa840f2847c0a4c489331608e538850b6a9ab1c30b"}, - {file = "deptry-0.23.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6af91d86380ef703adb6ae65f273d88e3cca7fd315c4c309da857a0cfa728244"}, - {file = "deptry-0.23.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:42a249d317c3128c286035a1f7aaa41a0c3c967f17848817c2e07ca50d5ed450"}, - {file = "deptry-0.23.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d988c7c75201997970bae1e8d564b4c7a14d350556c4f7c269fd33f3b081c314"}, - {file = "deptry-0.23.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae13d8e65ae88b77632c45edb4038301a6f9efcac06715abfde9a029e5879698"}, - {file = "deptry-0.23.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:40058a7a3fe9dacb745668897ee992e58daf5aac406b668ff2eaaf0f6f586550"}, - {file = "deptry-0.23.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d111cf4261eeadbdb20051d8d542f04deb3cfced0cb280ece8d654f7f6055921"}, - {file = 
"deptry-0.23.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9f9bbb92f95ada9ccfa5ecefee05ba3c39cfa0734b5483a3a1a3c4eeb9c99054"}, - {file = "deptry-0.23.1.tar.gz", hash = "sha256:5d23e0ef25f3c56405c05383a476edda55944563c5c47a3e9249ed3ec860d382"}, +files = [ + {file = "deptry-0.23.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1f2a6817a37d76e8f6b667381b7caf6ea3e6d6c18b5be24d36c625f387c79852"}, + {file = "deptry-0.23.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:9601b64cc0aed42687fdd5c912d5f1e90d7f7333fb589b14e35bfdfebae866f3"}, + {file = "deptry-0.23.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6172b2205f6e84bcc9df25226693d4deb9576a6f746c2ace828f6d13401d357"}, + {file = "deptry-0.23.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cfa4b3a46ee8a026eaa38e4b9ba43fe6036a07fe16bf0a663cb611b939f6af8"}, + {file = "deptry-0.23.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9d03cc99a61c348df92074a50e0a71b28f264f0edbf686084ca90e6fd44e3abe"}, + {file = "deptry-0.23.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9a46f78098f145100dc582a59af8548b26cdfa16cf0fbd85d2d44645e724cb6a"}, + {file = "deptry-0.23.0-cp39-abi3-win_amd64.whl", hash = "sha256:d53e803b280791d89a051b6183d9dc40411200e22a8ab7e6c32c6b169822a664"}, + {file = "deptry-0.23.0-cp39-abi3-win_arm64.whl", hash = "sha256:da7678624f4626d839c8c03675452cefc59d6cf57d25c84a9711dae514719279"}, + {file = "deptry-0.23.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:40706dcbed54141f2d23afa70a272171c8c46531cd6f0f9c8ef482c906b3cee2"}, + {file = "deptry-0.23.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:889541844092f18e7b48631852195f36c25c5afd4d7e074b19ba824b430add50"}, + {file = "deptry-0.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aff9156228eb16cd81792f920c1623c00cb59091ae572600ba0eac587da33c0c"}, + {file = "deptry-0.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:583154732cfd438a4a090b7d13d8b2016f1ac2732534f34fb689345768d8538b"}, + {file = "deptry-0.23.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:736e7bc557aec6118b2a4d454f0d81f070782faeaa9d8d3c9a15985c9f265372"}, + {file = "deptry-0.23.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5f7e4b1a5232ed6d352fca7173750610a169377d1951d3e9782947191942a765"}, + {file = "deptry-0.23.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:04afae204654542406318fd3dd6f4a6697579597f37195437daf84a53ee0ebbf"}, + {file = "deptry-0.23.0.tar.gz", hash = "sha256:4915a3590ccf38ad7a9176aee376745aa9de121f50f8da8fb9ccec87fa93e676"}, ] [package.dependencies] @@ -1336,7 +1105,7 @@ click = ">=8.0.0,<9" colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} packaging = ">=23.2" requirements-parser = ">=0.11.0,<1" -tomli = {version = ">=2.0.1", markers = "python_full_version < \"3.11.0\""} +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [[package]] name = "distlib" @@ -1344,7 +1113,6 @@ version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, @@ -1356,7 +1124,6 @@ version = "7.1.0" description = "A Python library for the Docker Engine API." 
optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, @@ -1375,14 +1142,13 @@ websockets = ["websocket-client (>=1.3.0)"] [[package]] name = "docutils" -version = "0.22" +version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ - {file = "docutils-0.22-py3-none-any.whl", hash = "sha256:4ed966a0e96a0477d852f7af31bdcb3adc049fbb35ccba358c2ea8a03287615e"}, - {file = "docutils-0.22.tar.gz", hash = "sha256:ba9d57750e92331ebe7c08a1bbf7a7f8143b86c476acd51528b042216a6aad0f"}, + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] @@ -1391,7 +1157,6 @@ version = "3.10.0" description = "Helpful functions for Python 🐍 🛠️" optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ {file = "domdf_python_tools-3.10.0-py3-none-any.whl", hash = "sha256:5e71c1be71bbcc1f881d690c8984b60e64298ec256903b3147f068bc33090c36"}, {file = "domdf_python_tools-3.10.0.tar.gz", hash = "sha256:2ae308d2f4f1e9145f5f4ba57f840fbfd1c2983ee26e4824347789649d3ae298"}, @@ -1407,114 +1172,116 @@ dates = ["pytz (>=2019.1)"] [[package]] name = "duckdb" -version = "1.3.2" +version = "1.2.0" description = "DuckDB in-process database" optional = true python-versions = ">=3.7.0" -groups = ["main"] -markers = "extra == \"duckdb\"" -files = [ - {file = "duckdb-1.3.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:14676651b86f827ea10bf965eec698b18e3519fdc6266d4ca849f5af7a8c315e"}, - {file = "duckdb-1.3.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e584f25892450757919639b148c2410402b17105bd404017a57fa9eec9c98919"}, - {file = "duckdb-1.3.2-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:84a19f185ee0c5bc66d95908c6be19103e184b743e594e005dee6f84118dc22c"}, - {file = "duckdb-1.3.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:186fc3f98943e97f88a1e501d5720b11214695571f2c74745d6e300b18bef80e"}, - {file = "duckdb-1.3.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6b7e6bb613b73745f03bff4bb412f362d4a1e158bdcb3946f61fd18e9e1a8ddf"}, - {file = "duckdb-1.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1c90646b52a0eccda1f76b10ac98b502deb9017569e84073da00a2ab97763578"}, - {file = "duckdb-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:4cdffb1e60defbfa75407b7f2ccc322f535fd462976940731dfd1644146f90c6"}, - {file = "duckdb-1.3.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e1872cf63aae28c3f1dc2e19b5e23940339fc39fb3425a06196c5d00a8d01040"}, - {file = "duckdb-1.3.2-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:db256c206056468ae6a9e931776bdf7debaffc58e19a0ff4fa9e7e1e82d38b3b"}, - {file = "duckdb-1.3.2-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:1d57df2149d6e4e0bd5198689316c5e2ceec7f6ac0a9ec11bc2b216502a57b34"}, - {file = "duckdb-1.3.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:54f76c8b1e2a19dfe194027894209ce9ddb073fd9db69af729a524d2860e4680"}, - {file = "duckdb-1.3.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:45bea70b3e93c6bf766ce2f80fc3876efa94c4ee4de72036417a7bd1e32142fe"}, - {file = "duckdb-1.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:003f7d36f0d8a430cb0e00521f18b7d5ee49ec98aaa541914c6d0e008c306f1a"}, - {file = "duckdb-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:0eb210cedf08b067fa90c666339688f1c874844a54708562282bc54b0189aac6"}, - {file = "duckdb-1.3.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2455b1ffef4e3d3c7ef8b806977c0e3973c10ec85aa28f08c993ab7f2598e8dd"}, - {file = "duckdb-1.3.2-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:9d0ae509713da3461c000af27496d5413f839d26111d2a609242d9d17b37d464"}, - {file = "duckdb-1.3.2-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:72ca6143d23c0bf6426396400f01fcbe4785ad9ceec771bd9a4acc5b5ef9a075"}, - {file = "duckdb-1.3.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b49a11afba36b98436db83770df10faa03ebded06514cb9b180b513d8be7f392"}, - {file = "duckdb-1.3.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:36abdfe0d1704fe09b08d233165f312dad7d7d0ecaaca5fb3bb869f4838a2d0b"}, - {file = "duckdb-1.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3380aae1c4f2af3f37b0bf223fabd62077dd0493c84ef441e69b45167188e7b6"}, - {file = "duckdb-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:11af73963ae174aafd90ea45fb0317f1b2e28a7f1d9902819d47c67cc957d49c"}, - {file = "duckdb-1.3.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:a3418c973b06ac4e97f178f803e032c30c9a9f56a3e3b43a866f33223dfbf60b"}, - {file = "duckdb-1.3.2-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:2a741eae2cf110fd2223eeebe4151e22c0c02803e1cfac6880dbe8a39fecab6a"}, - {file = "duckdb-1.3.2-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:51e62541341ea1a9e31f0f1ade2496a39b742caf513bebd52396f42ddd6525a0"}, - {file = "duckdb-1.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e519de5640e5671f1731b3ae6b496e0ed7e4de4a1c25c7a2f34c991ab64d71"}, - {file = "duckdb-1.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4732fb8cc60566b60e7e53b8c19972cb5ed12d285147a3063b16cc64a79f6d9f"}, - {file = "duckdb-1.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97f7a22dcaa1cca889d12c3dc43a999468375cdb6f6fe56edf840e062d4a8293"}, - {file = "duckdb-1.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:cd3d717bf9c49ef4b1016c2216517572258fa645c2923e91c5234053defa3fb5"}, - {file = "duckdb-1.3.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:18862e3b8a805f2204543d42d5f103b629cb7f7f2e69f5188eceb0b8a023f0af"}, - {file = "duckdb-1.3.2-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:75ed129761b6159f0b8eca4854e496a3c4c416e888537ec47ff8eb35fda2b667"}, - {file = "duckdb-1.3.2-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:875193ae9f718bc80ab5635435de5b313e3de3ec99420a9b25275ddc5c45ff58"}, - {file = "duckdb-1.3.2-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09b5fd8a112301096668903781ad5944c3aec2af27622bd80eae54149de42b42"}, - {file = "duckdb-1.3.2-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10cb87ad964b989175e7757d7ada0b1a7264b401a79be2f828cf8f7c366f7f95"}, - {file = "duckdb-1.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4389fc3812e26977034fe3ff08d1f7dbfe6d2d8337487b4686f2b50e254d7ee3"}, - {file = "duckdb-1.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:07952ec6f45dd3c7db0f825d231232dc889f1f2490b97a4e9b7abb6830145a19"}, - {file = "duckdb-1.3.2.tar.gz", hash = 
"sha256:c658df8a1bc78704f702ad0d954d82a1edd4518d7a04f00027ec53e40f591ff5"}, +files = [ + {file = "duckdb-1.2.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:7452d3655063cc3062504b5b22f8968acb96ffcdc6c2b8207bbec9da1de1f884"}, + {file = "duckdb-1.2.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:28d40a269212270e08b8541ea0922c3a893407897481cd484ad896bc2ba77a00"}, + {file = "duckdb-1.2.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:ed4586aa441a57f68e5fa5655b8a86509e1c3b6521ad4d40455ae4594e18cd59"}, + {file = "duckdb-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07823a485bc656cf2f63020117fec5245aa7fb8d085a43700208ac8b7e728866"}, + {file = "duckdb-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3b86475373cbd000035f34ba02420bc8ff432eaa646b09c5de975610120155d"}, + {file = "duckdb-1.2.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be7a14d1380ea8345b27bf5bbe77209c14ee0277c7401f504a2519936f9d087e"}, + {file = "duckdb-1.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c491485a14f806d12407d85510da8f09ad5d9a079ec449b7bff75eea5f9431c3"}, + {file = "duckdb-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:c8f6c09c939deb0bccaa6485798dacef0969046d1aa845ef10063558c8ee14e0"}, + {file = "duckdb-1.2.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:970a396b133608b5acb297cc172097866abbbce6cc57a2ec6b128b4f99a63ecd"}, + {file = "duckdb-1.2.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:ecd713a8388c1e173ef04aa5545873e93d44cb950c2af5459b44668676abc873"}, + {file = "duckdb-1.2.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:9e1323ab11ca9ee72bb3c54dfb4919add4b2aa524085bac80c2a888ce673cdf0"}, + {file = "duckdb-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c22e4ddcf1a76b4cf90cac23de06910557b239b4ba783e6dec1e04210de897e9"}, + {file = "duckdb-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55f2b0fbe63786061b028f48e41efcecfdcf3d5f8cb5ce415ee1d5885691c19f"}, + {file = "duckdb-1.2.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6dc9fd4c6f3505d7d69eed05d26a345d9652a4dab791b6d95ac18d6cdda2041"}, + {file = "duckdb-1.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4788c1f6d588be232b4a9dbc2c4a3546cd1ced945a1182d785cf913a5bd122a3"}, + {file = "duckdb-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:eeb5a517445d18949610cd30da1215303693cdae2942e6b1b7661314380f715e"}, + {file = "duckdb-1.2.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c0427501908d3b4fe464913b0ae2418ff52d1fa24b3982d864169b1d54b6bbee"}, + {file = "duckdb-1.2.0-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:33df2430f35e68739fd9fb6bbe1a5f86f4f46b362c9abb3a3f74a989560ef597"}, + {file = "duckdb-1.2.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:fd8ca2910efb85f0dd0d50383eaed9b6b7e86e6cacb032c36712e84265855e58"}, + {file = "duckdb-1.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9747d3d2a3290c795c0343eb927dbc75ca78d0440726824c2a39d9634fba9394"}, + {file = "duckdb-1.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91704accb74223267ae226f3470d71f7ad824549482b3f7fc91710a9fe5a1152"}, + {file = "duckdb-1.2.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9357737c6699b1f57e1d02b299371b2634bf08927d4e8386146ec5e4d1ebb31"}, + {file = "duckdb-1.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:8d61ba5272dd1bf772b7a74f4964e83080602f8f6e9a46a0fa7203a4e0e05249"}, + {file = "duckdb-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:f317cfa2f6ff3bc209985715669f4b8dd601faa69e46a206163e53b8db61a1d1"}, + {file = "duckdb-1.2.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:7feaaf185e89138e3d0f4dc9bf647767df1a3f080b4be719837613cb9288b79e"}, + {file = "duckdb-1.2.0-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:a52bb5991656cab0b90537c5e05746019be09c5f63535db03ddbff6f65b2ccb3"}, + {file = "duckdb-1.2.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:4d10d5596667f82812b130f3e7fffb282a31c05525ee2f8adddfaa1a07529fe9"}, + {file = "duckdb-1.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436b7c0cd40a63fdce8477b03868026b60b2376cf155812be07392213b707874"}, + {file = "duckdb-1.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c6b8464d9bd5770071d4a00a457b4c09974b930ccb1fe99991cfa8ddda0b905"}, + {file = "duckdb-1.2.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2835bc4828d2e1f8ad58f8ef946815af8beb55f9697e6e9d5a028b81abc02c62"}, + {file = "duckdb-1.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b35284599ac6bf6a09ffd334bc7f4d5df47126bce054a0f73b53f3eac1a5688e"}, + {file = "duckdb-1.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:5cf770fdd5244e47b3cbca6dd4ef2d13b6b9a6071f3fc7b55487e9ddff19e9cd"}, + {file = "duckdb-1.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ccd37c1c000f2a3a7e8852d9cc64de4549ab484d4ecc05f8a3df76443d3b8"}, + {file = "duckdb-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89d0111609383bd440f1afe2b540969ec02cd1e11959df0313efb644c14d061"}, + {file = "duckdb-1.2.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:304c5395f9bd788b1e35a71407b80e3af116daa77b05dc417a6deb986ffd4def"}, + {file = "duckdb-1.2.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:6effb33a2bed59ddaa53cb5e3cfb2ad47e2fb98a156f49073df7c755394ab52a"}, + {file = "duckdb-1.2.0-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:a405579b402e49ad5b52e58953e29a489c4f611a0c768088a50a086baea5e134"}, + {file = "duckdb-1.2.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb4ce9c6cfc0f45d1cf827e5a10294fdfd235e221aeebf10d3a31e898e3a2e0e"}, + {file = "duckdb-1.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:648e2179e1a56cca884c1c993d12f07807f5a285d78972cb3a001736c8f6d332"}, + {file = "duckdb-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b88bf1cc28d76e23534ae1485c5fefcac610ee98f61b378ec255530387fbf93"}, + {file = "duckdb-1.2.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4548e068e8dfbda5839c3a5ed1f036f0773d984d02d933ee54395c864228fe9b"}, + {file = "duckdb-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:a679ab1ab14bc2adf9ce8bc06ae64b9312a63b93925becc8959ff38350d913de"}, + {file = "duckdb-1.2.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f802ddf4d87d319fd957d5dbc283db750c970909b6456bd3e3a51f61e153b524"}, + {file = "duckdb-1.2.0-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:238093c290e63f010684a970e1af0780f8974b3a812b4f6a734d78a73658bd3d"}, + {file = "duckdb-1.2.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:a7d2577229c699431f620bdd1e97175e558f8bfd0f56fa6bcc41f13841148b91"}, + {file = "duckdb-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8336c9e4c66ab7fd1ba8786a2551f96f2bbc9a8d6d86f109c5d4c86634635e4f"}, + {file = "duckdb-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d01a72a4c6ba78882bc5d184b0768c9ac4351406af3e43a9da5810400acbdee"}, + {file = "duckdb-1.2.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b4d0b997702f74669ffb43283f3291ee05ca464b68deabee9a365cd40fc729e"}, + {file = "duckdb-1.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:69ce703855e30aa253bf47a4002ee35a7c63ff970306879ae76ab355bfe03632"}, + {file = "duckdb-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:a58c0763068fac7cf202a5ac9c0f85c0b6044a98185d73b5f049f955fd10b4e8"}, + {file = "duckdb-1.2.0.tar.gz", hash = "sha256:a5ce81828e6d1c3f06836d3bda38eef8355765f08ad5ce239abd6f56934dd1f8"}, ] [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["dev"] -markers = "python_version <= \"3.10\"" files = [ - {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, - {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} - [package.extras] test = ["pytest (>=6)"] [[package]] name = "fastavro" -version = "1.12.0" +version = "1.10.0" description = "Fast read/write of AVRO files" optional = false python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "fastavro-1.12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e38497bd24136aad2c47376ee958be4f5b775d6f03c11893fc636eea8c1c3b40"}, - {file = "fastavro-1.12.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e8d8401b021f4b3dfc05e6f82365f14de8d170a041fbe3345f992c9c13d4f0ff"}, - {file = "fastavro-1.12.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531b89117422db967d4e1547b34089454e942341e50331fa71920e9d5e326330"}, - {file = "fastavro-1.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae541edbc6091b890532d3e50d7bcdd324219730598cf9cb4522d1decabde37e"}, - {file = "fastavro-1.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:585a11f612eaadb0dcb1d3d348b90bd0d0d3ee4cf9abafd8b319663e8a0e1dcc"}, - {file = "fastavro-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:425fb96fbfbc06a0cc828946dd2ae9d85a5f9ff836af033d8cb963876ecb158e"}, - {file = "fastavro-1.12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:56f78d1d527bea4833945c3a8c716969ebd133c5762e2e34f64c795bd5a10b3e"}, - {file = "fastavro-1.12.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7ce0d117642bb4265ef6e1619ec2d93e942a98f60636e3c0fbf1eb438c49026"}, - {file = "fastavro-1.12.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:52e9d9648aad4cca5751bcbe2d3f98e85afb0ec6c6565707f4e2f647ba83ba85"}, - {file = "fastavro-1.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:6183875381ec1cf85a1891bf46696fd1ec2ad732980e7bccc1e52e9904e7664d"}, - {file = "fastavro-1.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5ad00a2b94d3c8bf9239acf92d56e3e457e1d188687a8d80f31e858ccf91a6d6"}, - {file = "fastavro-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:6c4d1c276ff1410f3830648bb43312894ad65709ca0cb54361e28954387a46ac"}, - {file = "fastavro-1.12.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e849c70198e5bdf6f08df54a68db36ff72bd73e8f14b1fd664323df073c496d8"}, - {file = "fastavro-1.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b260e1cdc9a77853a2586b32208302c08dddfb5c20720b5179ac5330e06ce698"}, - {file = "fastavro-1.12.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:181779688d8b80957953031f0d82ec0761be667a78e03dac642511ff996c771a"}, - {file = "fastavro-1.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6881caf914b36a57d1f90810f04a89bd9c837dd4a48e1b66a8b92136e85c415d"}, - {file = "fastavro-1.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8bf638248499eb78c422f12fedc08f9b90b5646c3368415e388691db60e7defb"}, - {file = "fastavro-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ed4f18b7c2f651a5ee2233676f62aac332995086768301aa2c1741859d70b53e"}, - {file = "fastavro-1.12.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dbe2b690d9caba7d888126cc1dd980a8fcf5ee73de41a104e3f15bb5e08c19c8"}, - {file = "fastavro-1.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:07ff9e6c6e8739203ccced3205646fdac6141c2efc83f4dffabf5f7d0176646d"}, - {file = "fastavro-1.12.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a172655add31882cab4e1a96b7d49f419906b465b4c2165081db7b1db79852f"}, - {file = "fastavro-1.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:be20ce0331b70b35dca1a4c7808afeedf348dc517bd41602ed8fc9a1ac2247a9"}, - {file = "fastavro-1.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a52906681384a18b99b47e5f9eab64b4744d6e6bc91056b7e28641c7b3c59d2b"}, - {file = "fastavro-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:cf153531191bcfc445c21e05dd97232a634463aa717cf99fb2214a51b9886bff"}, - {file = "fastavro-1.12.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1928e88a760688e490118e1bedf0643b1f3727e5ba59c07ac64638dab81ae2a1"}, - {file = "fastavro-1.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd51b706a3ab3fe4af84a0b37f60d1bcd79295df18932494fc9f49db4ba2bab2"}, - {file = "fastavro-1.12.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1148263931f6965e1942cf670f146148ca95b021ae7b7e1f98bf179f1c26cc58"}, - {file = "fastavro-1.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4099e0f6fb8a55f59891c0aed6bfa90c4d20a774737e5282c74181b4703ea0cb"}, - {file = "fastavro-1.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:10c586e9e3bab34307f8e3227a2988b6e8ac49bff8f7b56635cf4928a153f464"}, - {file = "fastavro-1.12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:638bb234821d7377d27a23bfee5bd89dadbb956c483a27acabea813c5b3e4b58"}, - {file = "fastavro-1.12.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f47a4777b6ebfeef60c5d3c7e850a32e3ec5c8727ccf90436ecdfd887815ac16"}, - {file = 
"fastavro-1.12.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:58f743697aa63a359538b7258f5956f4f1a83d3cd4eb3c8b3c3a99b3385e4cfb"}, - {file = "fastavro-1.12.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ab6744c62dd65c3507375a489680c97c93504ec37892c51c592d9f2c441a93a7"}, - {file = "fastavro-1.12.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bf6bbfcef12942b45220cb7dcd222daed21223d4a02e8361570da0bedabcbc95"}, - {file = "fastavro-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:c7f025c69f13e34fc7281c688fedd8d4e7633eb15891ea630891ee34911bdfc2"}, - {file = "fastavro-1.12.0.tar.gz", hash = "sha256:a67a87be149825d74006b57e52be068dfa24f3bfc6382543ec92cd72327fe152"}, +files = [ + {file = "fastavro-1.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1a9fe0672d2caf0fe54e3be659b13de3cad25a267f2073d6f4b9f8862acc31eb"}, + {file = "fastavro-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86dd0410770e0c99363788f0584523709d85e57bb457372ec5c285a482c17fe6"}, + {file = "fastavro-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:190e80dc7d77d03a6a8597a026146b32a0bbe45e3487ab4904dc8c1bebecb26d"}, + {file = "fastavro-1.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bf570d63be9155c3fdc415f60a49c171548334b70fff0679a184b69c29b6bc61"}, + {file = "fastavro-1.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e07abb6798e95dccecaec316265e35a018b523d1f3944ad396d0a93cb95e0a08"}, + {file = "fastavro-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:37203097ed11d0b8fd3c004904748777d730cafd26e278167ea602eebdef8eb2"}, + {file = "fastavro-1.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d183c075f527ab695a27ae75f210d4a86bce660cda2f85ae84d5606efc15ef50"}, + {file = "fastavro-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7a95a2c0639bffd7c079b59e9a796bfc3a9acd78acff7088f7c54ade24e4a77"}, + {file = "fastavro-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a678153b5da1b024a32ec3f611b2e7afd24deac588cb51dd1b0019935191a6d"}, + {file = "fastavro-1.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:67a597a5cfea4dddcf8b49eaf8c2b5ffee7fda15b578849185bc690ec0cd0d8f"}, + {file = "fastavro-1.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1fd689724760b17f69565d8a4e7785ed79becd451d1c99263c40cb2d6491f1d4"}, + {file = "fastavro-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:4f949d463f9ac4221128a51e4e34e2562f401e5925adcadfd28637a73df6c2d8"}, + {file = "fastavro-1.10.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cfe57cb0d72f304bd0dcc5a3208ca6a7363a9ae76f3073307d095c9d053b29d4"}, + {file = "fastavro-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74e517440c824cb65fb29d3e3903a9406f4d7c75490cef47e55c4c82cdc66270"}, + {file = "fastavro-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203c17d44cadde76e8eecb30f2d1b4f33eb478877552d71f049265dc6f2ecd10"}, + {file = "fastavro-1.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6575be7f2b5f94023b5a4e766b0251924945ad55e9a96672dc523656d17fe251"}, + {file = "fastavro-1.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe471deb675ed2f01ee2aac958fbf8ebb13ea00fa4ce7f87e57710a0bc592208"}, + {file = "fastavro-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:567ff515f2a5d26d9674b31c95477f3e6022ec206124c62169bc2ffaf0889089"}, + {file = 
"fastavro-1.10.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:82263af0adfddb39c85f9517d736e1e940fe506dfcc35bc9ab9f85e0fa9236d8"}, + {file = "fastavro-1.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:566c193109ff0ff84f1072a165b7106c4f96050078a4e6ac7391f81ca1ef3efa"}, + {file = "fastavro-1.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e400d2e55d068404d9fea7c5021f8b999c6f9d9afa1d1f3652ec92c105ffcbdd"}, + {file = "fastavro-1.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9b8227497f71565270f9249fc9af32a93644ca683a0167cfe66d203845c3a038"}, + {file = "fastavro-1.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e62d04c65461b30ac6d314e4197ad666371e97ae8cb2c16f971d802f6c7f514"}, + {file = "fastavro-1.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:86baf8c9740ab570d0d4d18517da71626fe9be4d1142bea684db52bd5adb078f"}, + {file = "fastavro-1.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5bccbb6f8e9e5b834cca964f0e6ebc27ebe65319d3940b0b397751a470f45612"}, + {file = "fastavro-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0132f6b0b53f61a0a508a577f64beb5de1a5e068a9b4c0e1df6e3b66568eec4"}, + {file = "fastavro-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca37a363b711202c6071a6d4787e68e15fa3ab108261058c4aae853c582339af"}, + {file = "fastavro-1.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:cf38cecdd67ca9bd92e6e9ba34a30db6343e7a3bedf171753ee78f8bd9f8a670"}, + {file = "fastavro-1.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f4dd10e0ed42982122d20cdf1a88aa50ee09e5a9cd9b39abdffb1aa4f5b76435"}, + {file = "fastavro-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:aaef147dc14dd2d7823246178fd06fc5e477460e070dc6d9e07dd8193a6bc93c"}, + {file = "fastavro-1.10.0.tar.gz", hash = "sha256:47bf41ac6d52cdfe4a3da88c75a802321321b37b663a900d12765101a5d6886f"}, ] [package.extras] @@ -1525,42 +1292,38 @@ zstandard = ["zstandard"] [[package]] name = "filelock" -version = "3.18.0" +version = "3.17.0" description = "A platform independent file lock." optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ - {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, - {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, + {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, + {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, ] -markers = {main = "extra == \"ray\" or extra == \"hf\""} [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] -typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "flask" -version = "3.1.1" +version = "3.1.0" description = "A simple framework for building complex web applications." 
optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ - {file = "flask-3.1.1-py3-none-any.whl", hash = "sha256:07aae2bb5eaf77993ef57e357491839f5fd9f4dc281593a81a9e4d79a24f295c"}, - {file = "flask-3.1.1.tar.gz", hash = "sha256:284c7b8f2f58cb737f0cf1c30fd7eaf0ccfcde196099d24ecede3fc2005aa59e"}, + {file = "flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136"}, + {file = "flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac"}, ] [package.dependencies] -blinker = ">=1.9.0" +blinker = ">=1.9" click = ">=8.1.3" -importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} -itsdangerous = ">=2.2.0" -jinja2 = ">=3.1.2" -markupsafe = ">=2.1.1" -werkzeug = ">=3.1.0" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +itsdangerous = ">=2.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.1" [package.extras] async = ["asgiref (>=3.2)"] @@ -1568,145 +1331,128 @@ dotenv = ["python-dotenv"] [[package]] name = "flask-cors" -version = "6.0.1" -description = "A Flask extension simplifying CORS support" +version = "5.0.0" +description = "A Flask extension adding a decorator for CORS support" optional = false -python-versions = "<4.0,>=3.9" -groups = ["dev"] +python-versions = "*" files = [ - {file = "flask_cors-6.0.1-py3-none-any.whl", hash = "sha256:c7b2cbfb1a31aa0d2e5341eea03a6805349f7a61647daee1a15c46bbe981494c"}, - {file = "flask_cors-6.0.1.tar.gz", hash = "sha256:d81bcb31f07b0985be7f48406247e9243aced229b7747219160a0559edd678db"}, + {file = "Flask_Cors-5.0.0-py2.py3-none-any.whl", hash = "sha256:b9e307d082a9261c100d8fb0ba909eec6a228ed1b60a8315fd85f783d61910bc"}, + {file = "flask_cors-5.0.0.tar.gz", hash = "sha256:5aadb4b950c4e93745034594d9f3ea6591f734bb3662e16e255ffbf5e89c88ef"}, ] [package.dependencies] -flask = ">=0.9" -Werkzeug = ">=0.7" +Flask = ">=0.9" [[package]] name = "frozenlist" -version = "1.7.0" +version = "1.5.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "(extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\") and (extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\" or extra == \"ray\")" -files = [ - {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a"}, - {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61"}, - {file = "frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d"}, - {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e"}, - {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9"}, - {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c"}, - {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981"}, - {file = 
"frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615"}, - {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50"}, - {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa"}, - {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577"}, - {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59"}, - {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e"}, - {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd"}, - {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718"}, - {file = "frozenlist-1.7.0-cp310-cp310-win32.whl", hash = "sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e"}, - {file = "frozenlist-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464"}, - {file = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a"}, - {file = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750"}, - {file = "frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd"}, - {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2"}, - {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f"}, - {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30"}, - {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98"}, - {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86"}, - {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae"}, - {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8"}, - {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31"}, - {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7"}, - {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5"}, - {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898"}, - {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56"}, - {file = "frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7"}, - {file = "frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d"}, - {file = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2"}, - {file = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb"}, - {file = "frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478"}, - {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8"}, - {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08"}, - {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4"}, - {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b"}, - {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e"}, - {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca"}, - {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df"}, - {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5"}, - {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025"}, - {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01"}, - {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08"}, - {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43"}, - {file = "frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3"}, - {file = "frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a"}, - 
{file = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee"}, - {file = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d"}, - {file = "frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43"}, - {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d"}, - {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee"}, - {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb"}, - {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f"}, - {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60"}, - {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00"}, - {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b"}, - {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c"}, - {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949"}, - {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca"}, - {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b"}, - {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e"}, - {file = "frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1"}, - {file = "frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba"}, - {file = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d"}, - {file = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d"}, - {file = "frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b"}, - {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146"}, - {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74"}, - {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1"}, - {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1"}, - {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384"}, - {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb"}, - {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c"}, - {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65"}, - {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3"}, - {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657"}, - {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104"}, - {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf"}, - {file = "frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81"}, - {file = "frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e"}, - {file = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630"}, - {file = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71"}, - {file = "frozenlist-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44"}, - {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878"}, - {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb"}, - {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6"}, - {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35"}, - {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87"}, - {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677"}, - {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938"}, - {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2"}, - {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319"}, - {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890"}, - {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd"}, - {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb"}, - {file = "frozenlist-1.7.0-cp39-cp39-win32.whl", hash = "sha256:b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e"}, - {file = "frozenlist-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63"}, - {file = "frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e"}, - {file = "frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f"}, +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, + {file = 
"frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, + {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, + {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, + {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, + {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"}, + {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"}, + {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"}, + {file = 
"frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"}, + {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"}, + {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"}, + {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"}, + {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, + 
{file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, + {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, + {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, + {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, + {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, ] [[package]] name = "fsspec" -version = "2025.5.1" +version = "2025.2.0" description = "File-system specification" optional = false -python-versions = ">=3.9" -groups = ["main"] +python-versions = ">=3.8" files = [ - {file = "fsspec-2025.5.1-py3-none-any.whl", hash = "sha256:24d3a2e663d5fc735ab256263c4075f374a174c3410c0b25e5bd1970bceaa462"}, - {file = "fsspec-2025.5.1.tar.gz", hash = "sha256:2e55e47a540b91843b755e83ded97c6e897fa0942b11490113f09e9c443c2475"}, + {file = "fsspec-2025.2.0-py3-none-any.whl", hash = "sha256:9de2ad9ce1f85e1931858535bc882543171d197001a0a5eb2ddc04f1781ab95b"}, + {file = "fsspec-2025.2.0.tar.gz", hash = "sha256:1c24b16eaa0a1798afa0337aa0db9b256718ab2a89c425371f5628d22c3b6afd"}, ] [package.extras] @@ -1739,21 +1485,19 @@ tqdm = ["tqdm"] [[package]] name = "gcsfs" -version = "2025.5.1" +version = "2025.2.0" description = "Convenient Filesystem interface over GCS" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"gcsfs\"" files = [ - {file = "gcsfs-2025.5.1-py2.py3-none-any.whl", hash = "sha256:48712471ff71ac83d3e2152ba4dc232874698466e344d5e700feba06b0a0de7b"}, - {file = "gcsfs-2025.5.1.tar.gz", hash = "sha256:ba945530cf4857cd9d599ccb3ae729c65c39088880b11c4df1fecac30df5f3e3"}, + {file = "gcsfs-2025.2.0-py2.py3-none-any.whl", hash = "sha256:293fc0bd40402f954b2f3edc7289116ece3995525abc04473834fcdd3f220bd9"}, + {file = "gcsfs-2025.2.0.tar.gz", hash = "sha256:1013b3f1213d867fffc732dbf1d963127dfa6e5e863f8077696b892696b3e3d9"}, ] [package.dependencies] aiohttp = "<4.0.0a0 
|| >4.0.0a0,<4.0.0a1 || >4.0.0a1" decorator = ">4.1.2" -fsspec = "2025.5.1" +fsspec = "2025.2.0" google-auth = ">=1.2" google-auth-oauthlib = "*" google-cloud-storage = "*" @@ -1763,13 +1507,46 @@ requests = "*" crc = ["crcmod"] gcsfuse = ["fusepy"] +[[package]] +name = "getdaft" +version = "0.4.4" +description = "Distributed Dataframes for Multimodal Data" +optional = true +python-versions = ">=3.9" +files = [ + {file = "getdaft-0.4.4-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1b8d4bcb28360cbc2a1a68fa2190b879eae6413938136f89d8e96810d38b0958"}, + {file = "getdaft-0.4.4-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:5f51a0406425398992b9d5203978a248e80c0b65027459c4e852bc92df9e1676"}, + {file = "getdaft-0.4.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e84fd7168daf1e8c9a0fb012b7b91cd6bed5259a69207a0a0a3367345f7b78e6"}, + {file = "getdaft-0.4.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6ac26a29c6a732f56898731b982da1949fb0106bd9d33348421b5d9b253fcc1"}, + {file = "getdaft-0.4.4-cp39-abi3-win_amd64.whl", hash = "sha256:576ccfc762d30a4c5003f98994e36b14d7afb1b4bea59d36d61efd277d863d11"}, + {file = "getdaft-0.4.4.tar.gz", hash = "sha256:53d15b38ab6ac0a516200dfd60e1f517437d9b973f6dd06ebccdff9f8b293fe1"}, +] + +[package.dependencies] +fsspec = "*" +pyarrow = ">=8.0.0" +tqdm = "*" +typing-extensions = {version = ">=4.0.0", markers = "python_full_version < \"3.10\""} + +[package.extras] +all = ["getdaft[aws,azure,deltalake,gcp,iceberg,numpy,pandas,ray,sql,unity]"] +aws = ["boto3"] +deltalake = ["deltalake"] +hudi = ["pyarrow (>=8.0.0)"] +iceberg = ["packaging", "pyiceberg (>=0.4.0)"] +lance = ["lancedb"] +numpy = ["numpy"] +pandas = ["pandas"] +ray = ["packaging", "ray[client,data] (>=2.0.0)", "ray[client,data] (>=2.10.0)"] +sql = ["connectorx", "sqlalchemy", "sqlglot"] +unity = ["unitycatalog"] + [[package]] name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." 
optional = false python-versions = "*" -groups = ["docs"] files = [ {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, @@ -1783,44 +1560,40 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "google-api-core" -version = "2.25.1" +version = "2.24.1" description = "Google API client core library" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"gcsfs\"" files = [ - {file = "google_api_core-2.25.1-py3-none-any.whl", hash = "sha256:8a2a56c1fef82987a524371f99f3bd0143702fecc670c72e600c1cda6bf8dbb7"}, - {file = "google_api_core-2.25.1.tar.gz", hash = "sha256:d2aaa0b13c78c61cb3f4282c464c046e45fbd75755683c9c525e6e8f7ed0a5e8"}, + {file = "google_api_core-2.24.1-py3-none-any.whl", hash = "sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1"}, + {file = "google_api_core-2.24.1.tar.gz", hash = "sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a"}, ] [package.dependencies] -google-auth = ">=2.14.1,<3.0.0" -googleapis-common-protos = ">=1.56.2,<2.0.0" +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" proto-plus = [ - {version = ">=1.22.3,<2.0.0"}, - {version = ">=1.25.0,<2.0.0", markers = "python_version >= \"3.13\""}, + {version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""}, + {version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""}, ] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" -requests = ">=2.18.0,<3.0.0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" [package.extras] -async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.0)"] -grpc = ["grpcio (>=1.33.2,<2.0.0)", "grpcio (>=1.49.1,<2.0.0) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.0)", "grpcio-status (>=1.49.1,<2.0.0) ; python_version >= \"3.11\""] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.40.3" +version = "2.38.0" description = "Google Authentication Library" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"gcsfs\" or extra == \"gcp-auth\"" files = [ - {file = "google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca"}, - {file = "google_auth-2.40.3.tar.gz", hash = "sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77"}, + {file = "google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a"}, + {file = "google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4"}, ] [package.dependencies] @@ -1829,26 +1602,22 @@ pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" 
[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] enterprise-cert = ["cryptography", "pyopenssl"] -pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] -pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +pyjwt = ["cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0)"] -testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] -urllib3 = ["packaging", "urllib3"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-auth-oauthlib" -version = "1.2.2" +version = "1.2.1" description = "Google Authentication Library" optional = true python-versions = ">=3.6" -groups = ["main"] -markers = "extra == \"gcsfs\"" files = [ - {file = "google_auth_oauthlib-1.2.2-py3-none-any.whl", hash = "sha256:fd619506f4b3908b5df17b65f39ca8d66ea56986e5472eb5978fd8f3786f00a2"}, - {file = "google_auth_oauthlib-1.2.2.tar.gz", hash = "sha256:11046fb8d3348b296302dd939ace8af0a724042e8029c1b872d87fabc9f41684"}, + {file = "google_auth_oauthlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:2d58a27262d55aa1b87678c3ba7142a080098cbc2024f903c62355deb235d91f"}, + {file = "google_auth_oauthlib-1.2.1.tar.gz", hash = "sha256:afd0cad092a2eaa53cd8e8298557d6de1034c6cb4a740500b5357b648af97263"}, ] [package.dependencies] @@ -1860,15 +1629,13 @@ tool = ["click (>=6.0.0)"] [[package]] name = "google-cloud-core" -version = "2.4.3" +version = "2.4.1" description = "Google Cloud API client core library" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"gcsfs\"" files = [ - {file = "google_cloud_core-2.4.3-py2.py3-none-any.whl", hash = "sha256:5130f9f4c14b4fafdff75c79448f9495cfade0d8775facf1b09c3bf67e027f6e"}, - {file = "google_cloud_core-2.4.3.tar.gz", hash = "sha256:1fab62d7102844b278fe6dead3af32408b1df3eb06f5c7e8634cbd40edc4da53"}, + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, ] [package.dependencies] @@ -1880,72 +1647,61 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-storage" -version = "3.2.0" +version = "3.0.0" description = "Google Cloud Storage API client library" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"gcsfs\"" files = [ - {file = "google_cloud_storage-3.2.0-py3-none-any.whl", hash = "sha256:ff7a9a49666954a7c3d1598291220c72d3b9e49d9dfcf9dfaecb301fc4fb0b24"}, - {file = "google_cloud_storage-3.2.0.tar.gz", hash = "sha256:decca843076036f45633198c125d1861ffbf47ebf5c0e3b98dcb9b2db155896c"}, + {file = "google_cloud_storage-3.0.0-py2.py3-none-any.whl", hash = "sha256:f85fd059650d2dbb0ac158a9a6b304b66143b35ed2419afec2905ca522eb2c6a"}, + {file = 
"google_cloud_storage-3.0.0.tar.gz", hash = "sha256:2accb3e828e584888beff1165e5f3ac61aa9088965eb0165794a82d8c7f95297"}, ] [package.dependencies] -google-api-core = ">=2.15.0,<3.0.0" -google-auth = ">=2.26.1,<3.0.0" -google-cloud-core = ">=2.4.2,<3.0.0" -google-crc32c = ">=1.1.3,<2.0.0" -google-resumable-media = ">=2.7.2,<3.0.0" -requests = ">=2.22.0,<3.0.0" +google-api-core = ">=2.15.0,<3.0.0dev" +google-auth = ">=2.26.1,<3.0dev" +google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" +google-resumable-media = ">=2.7.2" +requests = ">=2.18.0,<3.0.0dev" [package.extras] -protobuf = ["protobuf (>=3.20.2,<7.0.0)"] -tracing = ["opentelemetry-api (>=1.1.0,<2.0.0)"] +protobuf = ["protobuf (<6.0.0dev)"] +tracing = ["opentelemetry-api (>=1.1.0)"] [[package]] name = "google-crc32c" -version = "1.7.1" +version = "1.6.0" description = "A python wrapper of the C library 'Google CRC32C'" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"gcsfs\"" -files = [ - {file = "google_crc32c-1.7.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:b07d48faf8292b4db7c3d64ab86f950c2e94e93a11fd47271c28ba458e4a0d76"}, - {file = "google_crc32c-1.7.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7cc81b3a2fbd932a4313eb53cc7d9dde424088ca3a0337160f35d91826880c1d"}, - {file = "google_crc32c-1.7.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c67ca0a1f5b56162951a9dae987988679a7db682d6f97ce0f6381ebf0fbea4c"}, - {file = "google_crc32c-1.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc5319db92daa516b653600794d5b9f9439a9a121f3e162f94b0e1891c7933cb"}, - {file = "google_crc32c-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcdf5a64adb747610140572ed18d011896e3b9ae5195f2514b7ff678c80f1603"}, - {file = "google_crc32c-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:754561c6c66e89d55754106739e22fdaa93fafa8da7221b29c8b8e8270c6ec8a"}, - {file = "google_crc32c-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6fbab4b935989e2c3610371963ba1b86afb09537fd0c633049be82afe153ac06"}, - {file = "google_crc32c-1.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ed66cbe1ed9cbaaad9392b5259b3eba4a9e565420d734e6238813c428c3336c9"}, - {file = "google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6547b657621b6cbed3562ea7826c3e11cab01cd33b74e1f677690652883e77"}, - {file = "google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d68e17bad8f7dd9a49181a1f5a8f4b251c6dbc8cc96fb79f1d321dfd57d66f53"}, - {file = "google_crc32c-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:6335de12921f06e1f774d0dd1fbea6bf610abe0887a1638f64d694013138be5d"}, - {file = "google_crc32c-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2d73a68a653c57281401871dd4aeebbb6af3191dcac751a76ce430df4d403194"}, - {file = "google_crc32c-1.7.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:22beacf83baaf59f9d3ab2bbb4db0fb018da8e5aebdce07ef9f09fce8220285e"}, - {file = "google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19eafa0e4af11b0a4eb3974483d55d2d77ad1911e6cf6f832e1574f6781fd337"}, - {file = "google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d86616faaea68101195c6bdc40c494e4d76f41e07a37ffdef270879c15fb65"}, - {file = "google_crc32c-1.7.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:b7491bdc0c7564fcf48c0179d2048ab2f7c7ba36b84ccd3a3e1c3f7a72d3bba6"}, - {file = "google_crc32c-1.7.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:df8b38bdaf1629d62d51be8bdd04888f37c451564c2042d36e5812da9eff3c35"}, - {file = "google_crc32c-1.7.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:e42e20a83a29aa2709a0cf271c7f8aefaa23b7ab52e53b322585297bb94d4638"}, - {file = "google_crc32c-1.7.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:905a385140bf492ac300026717af339790921f411c0dfd9aa5a9e69a08ed32eb"}, - {file = "google_crc32c-1.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b211ddaf20f7ebeec5c333448582c224a7c90a9d98826fbab82c0ddc11348e6"}, - {file = "google_crc32c-1.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:0f99eaa09a9a7e642a61e06742856eec8b19fc0037832e03f941fe7cf0c8e4db"}, - {file = "google_crc32c-1.7.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32d1da0d74ec5634a05f53ef7df18fc646666a25efaaca9fc7dcfd4caf1d98c3"}, - {file = "google_crc32c-1.7.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e10554d4abc5238823112c2ad7e4560f96c7bf3820b202660373d769d9e6e4c9"}, - {file = "google_crc32c-1.7.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:9fc196f0b8d8bd2789352c6a522db03f89e83a0ed6b64315923c396d7a932315"}, - {file = "google_crc32c-1.7.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:bb5e35dcd8552f76eed9461a23de1030920a3c953c1982f324be8f97946e7127"}, - {file = "google_crc32c-1.7.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f2226b6a8da04f1d9e61d3e357f2460b9551c5e6950071437e122c958a18ae14"}, - {file = "google_crc32c-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f2b3522222746fff0e04a9bd0a23ea003ba3cccc8cf21385c564deb1f223242"}, - {file = "google_crc32c-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bda0fcb632d390e3ea8b6b07bf6b4f4a66c9d02dcd6fbf7ba00a197c143f582"}, - {file = "google_crc32c-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:713121af19f1a617054c41f952294764e0c5443d5a5d9034b2cd60f5dd7e0349"}, - {file = "google_crc32c-1.7.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8e9afc74168b0b2232fb32dd202c93e46b7d5e4bf03e66ba5dc273bb3559589"}, - {file = "google_crc32c-1.7.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa8136cc14dd27f34a3221c0f16fd42d8a40e4778273e61a3c19aedaa44daf6b"}, - {file = "google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85fef7fae11494e747c9fd1359a527e5970fc9603c90764843caabd3a16a0a48"}, - {file = "google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82"}, - {file = "google_crc32c-1.7.1.tar.gz", hash = "sha256:2bff2305f98846f3e825dbeec9ee406f89da7962accdb29356e4eadc251bd472"}, +files = [ + {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa"}, + {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9"}, + {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7"}, + {file = 
"google_crc32c-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e"}, + {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc"}, + {file = "google_crc32c-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42"}, + {file = "google_crc32c-1.6.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4"}, + {file = "google_crc32c-1.6.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8"}, + {file = "google_crc32c-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d"}, + {file = "google_crc32c-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f"}, + {file = "google_crc32c-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3"}, + {file = "google_crc32c-1.6.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d"}, + {file = "google_crc32c-1.6.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b"}, + {file = "google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00"}, + {file = "google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3"}, + {file = "google_crc32c-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760"}, + {file = "google_crc32c-1.6.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205"}, + {file = "google_crc32c-1.6.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0"}, + {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2"}, + {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871"}, + {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57"}, + {file = "google_crc32c-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c"}, + {file = "google_crc32c-1.6.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc"}, + {file = "google_crc32c-1.6.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d"}, + {file = "google_crc32c-1.6.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24"}, + {file = "google_crc32c-1.6.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d"}, + {file = "google_crc32c-1.6.0.tar.gz", hash = "sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc"}, ] [package.extras] @@ -1957,8 +1713,6 @@ version = "2.7.2" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"gcsfs\"" files = [ {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, @@ -1973,22 +1727,20 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.70.0" +version = "1.66.0" description = "Common protobufs used in Google APIs" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"gcsfs\"" files = [ - {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, - {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, + {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"}, + {file = "googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"}, ] [package.dependencies] -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0)"] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "graphql-core" @@ -1996,7 +1748,6 @@ version = "3.2.6" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
optional = false python-versions = "<4,>=3.6" -groups = ["dev"] files = [ {file = "graphql_core-3.2.6-py3-none-any.whl", hash = "sha256:78b016718c161a6fb20a7d97bbf107f331cd1afe53e45566c59f776ed7f0b45f"}, {file = "graphql_core-3.2.6.tar.gz", hash = "sha256:c08eec22f9e40f0bd61d805907e3b3b1b9a320bc606e23dc145eebca07c8fbab"}, @@ -2007,67 +1758,84 @@ typing-extensions = {version = ">=4,<5", markers = "python_version < \"3.10\""} [[package]] name = "greenlet" -version = "3.2.3" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") and (extra == \"sql-postgres\" or extra == \"sql-sqlite\")" -files = [ - {file = "greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a433dbc54e4a37e4fff90ef34f25a8c00aed99b06856f0119dcf09fbafa16392"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:72e77ed69312bab0434d7292316d5afd6896192ac4327d44f3d613ecb85b037c"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68671180e3849b963649254a882cd544a3c75bfcd2c527346ad8bb53494444db"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49c8cfb18fb419b3d08e011228ef8a25882397f3a859b9fe1436946140b6756b"}, - {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:efc6dc8a792243c31f2f5674b670b3a95d46fa1c6a912b8e310d6f542e7b0712"}, - {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:731e154aba8e757aedd0781d4b240f1225b075b4409f1bb83b05ff410582cf00"}, - {file = "greenlet-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:96c20252c2f792defe9a115d3287e14811036d51e78b3aaddbee23b69b216302"}, - {file = "greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5"}, - {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc"}, - {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba"}, - {file = "greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34"}, - {file = "greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb"}, - {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c"}, - {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163"}, - {file = "greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849"}, - {file = "greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b"}, - {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0"}, - {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36"}, - {file = "greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3"}, - {file = "greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141"}, - {file = "greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a"}, - {file = "greenlet-3.2.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:42efc522c0bd75ffa11a71e09cd8a399d83fafe36db250a87cf1dacfaa15dc64"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d760f9bdfe79bff803bad32b4d8ffb2c1d2ce906313fc10a83976ffb73d64ca7"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8324319cbd7b35b97990090808fdc99c27fe5338f87db50514959f8059999805"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:8c37ef5b3787567d322331d5250e44e42b58c8c713859b8a04c6065f27efbf72"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce539fb52fb774d0802175d37fcff5c723e2c7d249c65916257f0a940cee8904"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:003c930e0e074db83559edc8705f3a2d066d4aa8c2f198aff1e454946efd0f26"}, - {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7e70ea4384b81ef9e84192e8a77fb87573138aa5d4feee541d8014e452b434da"}, - {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22eb5ba839c4b2156f18f76768233fe44b23a31decd9cc0d4cc8141c211fd1b4"}, - {file = "greenlet-3.2.3-cp39-cp39-win32.whl", hash = "sha256:4532f0d25df67f896d137431b13f4cdce89f7e3d4a96387a41290910df4d3a57"}, - {file = "greenlet-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:aaa7aae1e7f75eaa3ae400ad98f8644bb81e1dc6ba47ce8a93d3f17274e08322"}, - {file = "greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365"}, +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = 
"sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -2076,91 +1844,27 @@ test = ["objgraph", "psutil"] [[package]] name = "griffe" -version = "1.11.1" +version = "1.5.7" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ - {file = "griffe-1.11.1-py3-none-any.whl", hash = "sha256:5799cf7c513e4b928cfc6107ee6c4bc4a92e001f07022d97fd8dee2f612b6064"}, - {file = "griffe-1.11.1.tar.gz", hash = "sha256:d54ffad1ec4da9658901eb5521e9cddcdb7a496604f67d8ae71077f03f549b7e"}, + {file = "griffe-1.5.7-py3-none-any.whl", hash = "sha256:4af8ec834b64de954d447c7b6672426bb145e71605c74a4e22d510cc79fe7d8b"}, + {file = "griffe-1.5.7.tar.gz", hash = "sha256:465238c86deaf1137761f700fb343edd8ffc846d72f6de43c3c345ccdfbebe92"}, ] [package.dependencies] colorama = ">=0.4" -[[package]] -name = "hf-xet" -version = "1.1.5" -description = "Fast transfer of large files with the Hugging Face Hub." -optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"hf\" and (platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\")" -files = [ - {file = "hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f52c2fa3635b8c37c7764d8796dfa72706cc4eded19d638331161e82b0792e23"}, - {file = "hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9fa6e3ee5d61912c4a113e0708eaaef987047616465ac7aa30f7121a48fc1af8"}, - {file = "hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc874b5c843e642f45fd85cda1ce599e123308ad2901ead23d3510a47ff506d1"}, - {file = "hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dbba1660e5d810bd0ea77c511a99e9242d920790d0e63c0e4673ed36c4022d18"}, - {file = "hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ab34c4c3104133c495785d5d8bba3b1efc99de52c02e759cf711a91fd39d3a14"}, - {file = "hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:83088ecea236d5113de478acb2339f92c95b4fb0462acaa30621fac02f5a534a"}, - {file = "hf_xet-1.1.5-cp37-abi3-win_amd64.whl", hash = "sha256:73e167d9807d166596b4b2f0b585c6d5bd84a26dea32843665a8b58f6edba245"}, - {file = "hf_xet-1.1.5.tar.gz", hash = "sha256:69ebbcfd9ec44fdc2af73441619eeb06b94ee34511bbcf57cd423820090f5694"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "huggingface-hub" -version = "0.34.4" -description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -optional = true -python-versions = ">=3.8.0" -groups = ["main"] -markers = "extra == \"hf\"" -files = [ - {file = "huggingface_hub-0.34.4-py3-none-any.whl", hash = "sha256:9b365d781739c93ff90c359844221beef048403f1bc1f1c123c191257c3c890a"}, - {file = "huggingface_hub-0.34.4.tar.gz", hash = "sha256:a4228daa6fb001be3f4f4bdaf9a0db00e1739235702848df00885c9b5742c85c"}, -] - -[package.dependencies] -filelock = "*" -fsspec = ">=2023.5.0" -hf-xet = {version = ">=1.1.3,<2.0.0", markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\""} -packaging = ">=20.9" -pyyaml = ">=5.1" -requests = "*" -tqdm = ">=4.42.1" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", 
"fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -hf-transfer = ["hf-transfer (>=0.1.4)"] -hf-xet = ["hf-xet (>=1.1.2,<2.0.0)"] -inference = ["aiohttp"] -mcp = ["aiohttp", "mcp (>=1.8.0)", "typer"] -oauth = ["authlib (>=1.3.2)", "fastapi", "httpx", "itsdangerous"] -quality = ["libcst (>=1.4.0)", "mypy (==1.15.0) ; python_version >= \"3.9\"", "mypy (>=1.14.1,<1.15.0) ; python_version == \"3.8\"", "ruff (>=0.9.0)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "authlib (>=1.3.2)", "fastapi", "gradio (>=4.0.0)", "httpx", "itsdangerous", "jedi", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["safetensors[torch]", "torch"] -typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] - [[package]] name = "identify" -version = "2.6.12" +version = "2.6.7" description = "File identification library for Python" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ - {file = "identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2"}, - {file = "identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6"}, + {file = "identify-2.6.7-py2.py3-none-any.whl", hash = "sha256:155931cb617a401807b09ecec6635d6c692d180090a1cedca8ef7d58ba5b6aa0"}, + {file = "identify-2.6.7.tar.gz", hash = "sha256:3fa266b42eba321ee0b2bb0936a6a6b9e36a1351cbb69055b3082f4193035684"}, ] [package.extras] @@ -2172,7 +1876,6 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main", "dev", "docs"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -2187,60 +1890,43 @@ version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["dev"] files = 
[ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] -[[package]] -name = "impi-rt" -version = "2021.16.0" -description = "Intel® MPI Library" -optional = true -python-versions = "*" -groups = ["main"] -markers = "extra == \"bodo\" and sys_platform == \"win32\"" -files = [ - {file = "impi_rt-2021.16.0-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:d6121049766b2915f535b0abce65023e3ef4b0ef01c556ae9101dff7695a979f"}, - {file = "impi_rt-2021.16.0-py2.py3-none-win_amd64.whl", hash = "sha256:7f8e72b5bc020e6539115b1c02435b81bc16a6923fa82c99979baeedb4b2ba9a"}, -] - [[package]] name = "importlib-metadata" -version = "8.7.0" +version = "8.6.1" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" -groups = ["dev", "docs"] files = [ - {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, - {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, + {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, + {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, ] -markers = {dev = "python_full_version < \"3.10.2\"", docs = "python_version < \"3.10\""} [package.dependencies] zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] name = "iniconfig" -version = "2.1.0" +version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false -python-versions = ">=3.8" -groups = ["dev"] +python-versions = ">=3.7" files = [ - {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, - {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] @@ -2249,8 +1935,6 @@ version = "0.7.2" description = "An ISO 8601 date/time/duration parser and formatter" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"adlfs\"" files = [ {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, {file = "isodate-0.7.2.tar.gz", hash = 
"sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, @@ -2262,7 +1946,6 @@ version = "2.2.0" description = "Safely pass data to untrusted environments and back." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, @@ -2274,7 +1957,6 @@ version = "6.0.1" description = "Useful decorators and context managers" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4"}, {file = "jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3"}, @@ -2285,7 +1967,7 @@ files = [ [package.extras] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [[package]] name = "jaraco-packaging" @@ -2293,7 +1975,6 @@ version = "10.2.3" description = "tools to supplement packaging Python releases" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "jaraco.packaging-10.2.3-py3-none-any.whl", hash = "sha256:ceb5806d2ac5731ba5b265d196e4cb848afa2a958f01d0bf3a1dfaa3969ed92c"}, {file = "jaraco_packaging-10.2.3.tar.gz", hash = "sha256:d726cc42faa62b2f70585cbe1176b4b469fe6d75f21b19034b688b4340917933"}, @@ -2307,18 +1988,17 @@ sphinx = "*" [package.extras] doc = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "types-docutils"] +test = ["pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "types-docutils"] [[package]] name = "jinja2" -version = "3.1.6" +version = "3.1.5" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" -groups = ["dev", "docs"] files = [ - {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, - {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] [package.dependencies] @@ -2333,23 +2013,20 @@ version = "1.0.1" description = "JSON Matching Expressions" optional = false python-versions = ">=3.7" -groups = ["main", "dev"] files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] -markers = {main = "extra == \"dynamodb\" or extra == \"glue\" or extra == \"rest-sigv4\" or extra == \"s3fs\""} [[package]] name = "joserfc" -version = "1.2.2" +version = "1.0.3" description = "The ultimate Python library for JOSE RFCs, including JWS, JWE, JWK, JWA, JWT" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "joserfc-1.2.2-py3-none-any.whl", hash = "sha256:630cc36b2f11f749980401b0cd7305fab5735ee11d830d919bc207305d011358"}, - {file = "joserfc-1.2.2.tar.gz", hash = "sha256:0d2a84feecef96168635fd9bf288363fc75b4afef3d99691f77833c8e025d200"}, + {file = "joserfc-1.0.3-py3-none-any.whl", hash = "sha256:76c7efafb9b7bc635dd73e9e3819d393d952f042c24d6a98182759d39cbc743b"}, + {file = "joserfc-1.0.3.tar.gz", hash = "sha256:bcbed6fdfeefb9dc3bcca827f7539c57b353d514fbddf6c722a1c35aea4eb499"}, ] [package.dependencies] @@ -2364,7 +2041,6 @@ version = "1.33" description = "Apply JSON-Patches (RFC 6902)" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -groups = ["dev"] files = [ {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, @@ -2379,7 +2055,6 @@ version = "1.7.0" description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming." 
optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"}, {file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"}, @@ -2395,7 +2070,6 @@ version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, @@ -2403,16 +2077,14 @@ files = [ [[package]] name = "jsonschema" -version = "4.24.0" +version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = ">=3.9" -groups = ["main", "dev"] +python-versions = ">=3.8" files = [ - {file = "jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"}, - {file = "jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196"}, + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, ] -markers = {main = "extra == \"ray\""} [package.dependencies] attrs = ">=22.2.0" @@ -2430,7 +2102,6 @@ version = "0.3.4" description = "JSONSchema Spec with object-oriented paths" optional = false python-versions = "<4.0.0,>=3.8.0" -groups = ["dev"] files = [ {file = "jsonschema_path-0.3.4-py3-none-any.whl", hash = "sha256:f502191fdc2b22050f9a81c9237be9d27145b9001c55842bece5e94e382e52f8"}, {file = "jsonschema_path-0.3.4.tar.gz", hash = "sha256:8365356039f16cc65fddffafda5f58766e34bebab7d6d105616ab52bc4297001"}, @@ -2444,140 +2115,80 @@ requests = ">=2.31.0,<3.0.0" [[package]] name = "jsonschema-specifications" -version = "2025.4.1" +version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ - {file = "jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af"}, - {file = "jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608"}, + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, ] -markers = {main = "extra == \"ray\""} [package.dependencies] referencing = ">=0.31.0" -[[package]] -name = "kerberos" -version = "1.3.1" -description = "Kerberos high-level interface" -optional = true -python-versions = "*" -groups = ["main"] -markers = "extra == \"hive-kerberos\"" -files = [ - {file = "kerberos-1.3.1-cp27-cp27m-macosx_11_1_x86_64.whl", hash = "sha256:98a695c072efef535cb2b5f98e474d00671588859a94ec96c2c1508a113ff3aa"}, - {file = "kerberos-1.3.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:2e03c6a9d201d4aab5f899bfb8150de15335955bfce8ca43bfe9a41d7aae54dc"}, - {file = 
"kerberos-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2002b3b1541fc51e2c081ee7048f55e5d9ca63dd09f0d7b951c263920db3a0bb"}, - {file = "kerberos-1.3.1.tar.gz", hash = "sha256:cdd046142a4e0060f96a00eb13d82a5d9ebc0f2d7934393ed559bac773460a2c"}, -] - [[package]] name = "lazy-object-proxy" -version = "1.11.0" +version = "1.10.0" description = "A fast and thorough lazy object proxy." optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "lazy_object_proxy-1.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:132bc8a34f2f2d662a851acfd1b93df769992ed1b81e2b1fda7db3e73b0d5a18"}, - {file = "lazy_object_proxy-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:01261a3afd8621a1accb5682df2593dc7ec7d21d38f411011a5712dcd418fbed"}, - {file = "lazy_object_proxy-1.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:090935756cc041e191f22f4f9c7fd4fe9a454717067adf5b1bbd2ce3046b556e"}, - {file = "lazy_object_proxy-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:76ec715017f06410f57df442c1a8d66e6b5f7035077785b129817f5ae58810a4"}, - {file = "lazy_object_proxy-1.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a9f39098e93a63618a79eef2889ae3cf0605f676cd4797fdfd49fcd7ddc318b"}, - {file = "lazy_object_proxy-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:ee13f67f4fcd044ef27bfccb1c93d39c100046fec1fad6e9a1fcdfd17492aeb3"}, - {file = "lazy_object_proxy-1.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fd4c84eafd8dd15ea16f7d580758bc5c2ce1f752faec877bb2b1f9f827c329cd"}, - {file = "lazy_object_proxy-1.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:d2503427bda552d3aefcac92f81d9e7ca631e680a2268cbe62cd6a58de6409b7"}, - {file = "lazy_object_proxy-1.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0613116156801ab3fccb9e2b05ed83b08ea08c2517fdc6c6bc0d4697a1a376e3"}, - {file = "lazy_object_proxy-1.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bb03c507d96b65f617a6337dedd604399d35face2cdf01526b913fb50c4cb6e8"}, - {file = "lazy_object_proxy-1.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28c174db37946f94b97a97b579932ff88f07b8d73a46b6b93322b9ac06794a3b"}, - {file = "lazy_object_proxy-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:d662f0669e27704495ff1f647070eb8816931231c44e583f4d0701b7adf6272f"}, - {file = "lazy_object_proxy-1.11.0-py3-none-any.whl", hash = "sha256:a56a5093d433341ff7da0e89f9b486031ccd222ec8e52ec84d0ec1cdc819674b"}, - {file = "lazy_object_proxy-1.11.0.tar.gz", hash = "sha256:18874411864c9fbbbaa47f9fc1dd7aea754c86cfde21278ef427639d1dd78e9c"}, -] - -[[package]] -name = "llvmlite" -version = "0.43.0" -description = "lightweight wrapper around basic LLVM functionality" -optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version < \"3.10\" and extra == \"bodo\"" -files = [ - {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, - {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, - {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"}, - {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"}, - {file = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"}, - {file = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"}, - {file = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"}, - {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"}, - {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"}, - {file = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"}, - {file = "llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7"}, - {file = "llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7"}, - {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f"}, - {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844"}, - {file = "llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9"}, - {file = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"}, - {file = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"}, - {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"}, - {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"}, - {file = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"}, - {file = "llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}, -] - -[[package]] -name = "llvmlite" -version = "0.44.0" -description = "lightweight wrapper around basic LLVM functionality" -optional = true -python-versions = ">=3.10" -groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"bodo\"" -files = [ - {file = "llvmlite-0.44.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9fbadbfba8422123bab5535b293da1cf72f9f478a65645ecd73e781f962ca614"}, - {file = "llvmlite-0.44.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cccf8eb28f24840f2689fb1a45f9c0f7e582dd24e088dcf96e424834af11f791"}, - {file = "llvmlite-0.44.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7202b678cdf904823c764ee0fe2dfe38a76981f4c1e51715b4cb5abb6cf1d9e8"}, - {file = "llvmlite-0.44.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:40526fb5e313d7b96bda4cbb2c85cd5374e04d80732dd36a282d72a560bb6408"}, - {file = "llvmlite-0.44.0-cp310-cp310-win_amd64.whl", hash = "sha256:41e3839150db4330e1b2716c0be3b5c4672525b4c9005e17c7597f835f351ce2"}, - 
{file = "llvmlite-0.44.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:eed7d5f29136bda63b6d7804c279e2b72e08c952b7c5df61f45db408e0ee52f3"}, - {file = "llvmlite-0.44.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ace564d9fa44bb91eb6e6d8e7754977783c68e90a471ea7ce913bff30bd62427"}, - {file = "llvmlite-0.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5d22c3bfc842668168a786af4205ec8e3ad29fb1bc03fd11fd48460d0df64c1"}, - {file = "llvmlite-0.44.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f01a394e9c9b7b1d4e63c327b096d10f6f0ed149ef53d38a09b3749dcf8c9610"}, - {file = "llvmlite-0.44.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8489634d43c20cd0ad71330dde1d5bc7b9966937a263ff1ec1cebb90dc50955"}, - {file = "llvmlite-0.44.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:1d671a56acf725bf1b531d5ef76b86660a5ab8ef19bb6a46064a705c6ca80aad"}, - {file = "llvmlite-0.44.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f79a728e0435493611c9f405168682bb75ffd1fbe6fc360733b850c80a026db"}, - {file = "llvmlite-0.44.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0143a5ef336da14deaa8ec26c5449ad5b6a2b564df82fcef4be040b9cacfea9"}, - {file = "llvmlite-0.44.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d752f89e31b66db6f8da06df8b39f9b91e78c5feea1bf9e8c1fba1d1c24c065d"}, - {file = "llvmlite-0.44.0-cp312-cp312-win_amd64.whl", hash = "sha256:eae7e2d4ca8f88f89d315b48c6b741dcb925d6a1042da694aa16ab3dd4cbd3a1"}, - {file = "llvmlite-0.44.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:319bddd44e5f71ae2689859b7203080716448a3cd1128fb144fe5c055219d516"}, - {file = "llvmlite-0.44.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c58867118bad04a0bb22a2e0068c693719658105e40009ffe95c7000fcde88e"}, - {file = "llvmlite-0.44.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46224058b13c96af1365290bdfebe9a6264ae62fb79b2b55693deed11657a8bf"}, - {file = "llvmlite-0.44.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa0097052c32bf721a4efc03bd109d335dfa57d9bffb3d4c24cc680711b8b4fc"}, - {file = "llvmlite-0.44.0-cp313-cp313-win_amd64.whl", hash = "sha256:2fb7c4f2fb86cbae6dca3db9ab203eeea0e22d73b99bc2341cdf9de93612e930"}, - {file = "llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4"}, +python-versions = ">=3.8" +files = [ + {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = 
"sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = 
"sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, + {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, ] [[package]] name = "markdown" -version = "3.8.2" +version = "3.7" description = "Python implementation of John Gruber's Markdown." optional = false -python-versions = ">=3.9" -groups = ["docs"] +python-versions = ">=3.8" files = [ - {file = "markdown-3.8.2-py3-none-any.whl", hash = "sha256:5c83764dbd4e00bdd94d85a19b8d55ccca20fe35b2e678a1422b380324dd5f24"}, - {file = "markdown-3.8.2.tar.gz", hash = "sha256:247b9a70dd12e27f67431ce62523e675b866d254f900c4fe75ce3dda62237c45"}, + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] [package.dependencies] importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] -docs = ["mdx_gh_links (>=0.2)", "mkdocs (>=1.6)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [[package]] @@ -2586,7 +2197,6 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -2611,7 +2221,6 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" -groups = ["dev", "docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -2682,7 +2291,6 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -2694,7 +2302,6 @@ version = "1.3.4" description = "A deep merge function for 🐍." optional = false python-versions = ">=3.6" -groups = ["docs"] files = [ {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, @@ -2706,7 +2313,6 @@ version = "1.6.1" description = "Project documentation with Markdown." optional = false python-versions = ">=3.8" -groups = ["docs"] files = [ {file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"}, {file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"}, @@ -2730,18 +2336,17 @@ watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] -min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4) ; platform_system == \"Windows\"", "ghp-import (==1.0)", "importlib-metadata (==4.4) ; python_version < \"3.10\"", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.4)", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] [[package]] name = "mkdocs-autorefs" -version = "1.4.2" +version = "1.3.1" description = "Automatically link across pages in MkDocs." 
optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ - {file = "mkdocs_autorefs-1.4.2-py3-none-any.whl", hash = "sha256:83d6d777b66ec3c372a1aad4ae0cf77c243ba5bcda5bf0c6b8a2c5e7a3d89f13"}, - {file = "mkdocs_autorefs-1.4.2.tar.gz", hash = "sha256:e2ebe1abd2b67d597ed19378c0fff84d73d1dbce411fce7a7cc6f161888b6749"}, + {file = "mkdocs_autorefs-1.3.1-py3-none-any.whl", hash = "sha256:18c504ae4d3ee7f344369bb26cb31d4105569ee252aab7d75ec2734c2c8b0474"}, + {file = "mkdocs_autorefs-1.3.1.tar.gz", hash = "sha256:a6d30cbcccae336d622a66c2418a3c92a8196b69782774529ad441abb23c0902"}, ] [package.dependencies] @@ -2755,7 +2360,6 @@ version = "0.5.0" description = "MkDocs plugin to programmatically generate documentation pages during the build" optional = false python-versions = ">=3.7" -groups = ["docs"] files = [ {file = "mkdocs_gen_files-0.5.0-py3-none-any.whl", hash = "sha256:7ac060096f3f40bd19039e7277dd3050be9a453c8ac578645844d4d91d7978ea"}, {file = "mkdocs_gen_files-0.5.0.tar.gz", hash = "sha256:4c7cf256b5d67062a788f6b1d035e157fc1a9498c2399be9af5257d4ff4d19bc"}, @@ -2770,7 +2374,6 @@ version = "0.2.0" description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file" optional = false python-versions = ">=3.8" -groups = ["docs"] files = [ {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, @@ -2784,42 +2387,40 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-literate-nav" -version = "0.6.2" +version = "0.6.1" description = "MkDocs plugin to specify the navigation in Markdown instead of YAML" optional = false -python-versions = ">=3.9" -groups = ["docs"] +python-versions = ">=3.7" files = [ - {file = "mkdocs_literate_nav-0.6.2-py3-none-any.whl", hash = "sha256:0a6489a26ec7598477b56fa112056a5e3a6c15729f0214bea8a4dbc55bd5f630"}, - {file = "mkdocs_literate_nav-0.6.2.tar.gz", hash = "sha256:760e1708aa4be86af81a2b56e82c739d5a8388a0eab1517ecfd8e5aa40810a75"}, + {file = "mkdocs_literate_nav-0.6.1-py3-none-any.whl", hash = "sha256:e70bdc4a07050d32da79c0b697bd88e9a104cf3294282e9cb20eec94c6b0f401"}, + {file = "mkdocs_literate_nav-0.6.1.tar.gz", hash = "sha256:78a7ab6d878371728acb0cdc6235c9b0ffc6e83c997b037f4a5c6ff7cef7d759"}, ] [package.dependencies] -mkdocs = ">=1.4.1" +mkdocs = ">=1.0.3" [[package]] name = "mkdocs-material" -version = "9.6.16" +version = "9.6.4" description = "Documentation that simply works" optional = false python-versions = ">=3.8" -groups = ["docs"] files = [ - {file = "mkdocs_material-9.6.16-py3-none-any.whl", hash = "sha256:8d1a1282b892fe1fdf77bfeb08c485ba3909dd743c9ba69a19a40f637c6ec18c"}, - {file = "mkdocs_material-9.6.16.tar.gz", hash = "sha256:d07011df4a5c02ee0877496d9f1bfc986cfb93d964799b032dd99fe34c0e9d19"}, + {file = "mkdocs_material-9.6.4-py3-none-any.whl", hash = "sha256:414e8376551def6d644b8e6f77226022868532a792eb2c9accf52199009f568f"}, + {file = "mkdocs_material-9.6.4.tar.gz", hash = "sha256:4d1d35e1c1d3e15294cb7fa5d02e0abaee70d408f75027dc7be6e30fb32e6867"}, ] [package.dependencies] babel = ">=2.10,<3.0" -backrefs = ">=5.7.post1,<6.0" colorama = ">=0.4,<1.0" -jinja2 = ">=3.1,<4.0" +jinja2 = ">=3.0,<4.0" markdown = ">=3.2,<4.0" mkdocs = ">=1.6,<2.0" mkdocs-material-extensions = ">=1.3,<2.0" paginate = ">=0.5,<1.0" pygments = ">=2.16,<3.0" pymdown-extensions = ">=10.2,<11.0" +regex = ">=2022.4" requests = ">=2.26,<3.0" [package.extras] @@ 
-2833,7 +2434,6 @@ version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." optional = false python-versions = ">=3.8" -groups = ["docs"] files = [ {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, @@ -2841,14 +2441,13 @@ files = [ [[package]] name = "mkdocs-section-index" -version = "0.3.10" +version = "0.3.9" description = "MkDocs plugin to allow clickable sections that lead to an index page" optional = false -python-versions = ">=3.9" -groups = ["docs"] +python-versions = ">=3.8" files = [ - {file = "mkdocs_section_index-0.3.10-py3-none-any.whl", hash = "sha256:bc27c0d0dc497c0ebaee1fc72839362aed77be7318b5ec0c30628f65918e4776"}, - {file = "mkdocs_section_index-0.3.10.tar.gz", hash = "sha256:a82afbda633c82c5568f0e3b008176b9b365bf4bd8b6f919d6eff09ee146b9f8"}, + {file = "mkdocs_section_index-0.3.9-py3-none-any.whl", hash = "sha256:5e5eb288e8d7984d36c11ead5533f376fdf23498f44e903929d72845b24dfe34"}, + {file = "mkdocs_section_index-0.3.9.tar.gz", hash = "sha256:b66128d19108beceb08b226ee1ba0981840d14baf8a652b6c59e650f3f92e4f8"}, ] [package.dependencies] @@ -2856,14 +2455,13 @@ mkdocs = ">=1.2" [[package]] name = "mkdocstrings" -version = "0.30.0" +version = "0.28.1" description = "Automatic documentation from sources, for MkDocs." optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ - {file = "mkdocstrings-0.30.0-py3-none-any.whl", hash = "sha256:ae9e4a0d8c1789697ac776f2e034e2ddd71054ae1cf2c2bb1433ccfd07c226f2"}, - {file = "mkdocstrings-0.30.0.tar.gz", hash = "sha256:5d8019b9c31ddacd780b6784ffcdd6f21c408f34c0bd1103b5351d609d5b4444"}, + {file = "mkdocstrings-0.28.1-py3-none-any.whl", hash = "sha256:a5878ae5cd1e26f491ff084c1f9ab995687d52d39a5c558e9b7023d0e4e0b740"}, + {file = "mkdocstrings-0.28.1.tar.gz", hash = "sha256:fb64576906771b7701e8e962fd90073650ff689e95eb86e86751a66d65ab4489"}, ] [package.dependencies] @@ -2871,189 +2469,148 @@ importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} Jinja2 = ">=2.11.1" Markdown = ">=3.6" MarkupSafe = ">=1.1" -mkdocs = ">=1.6" -mkdocs-autorefs = ">=1.4" +mkdocs = ">=1.4" +mkdocs-autorefs = ">=1.3" +mkdocs-get-deps = ">=0.2" pymdown-extensions = ">=6.3" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""} [package.extras] crystal = ["mkdocstrings-crystal (>=0.3.4)"] -python = ["mkdocstrings-python (>=1.16.2)"] +python = ["mkdocstrings-python (>=0.5.2)"] python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] [[package]] name = "mkdocstrings-python" -version = "1.16.12" +version = "1.16.1" description = "A Python handler for mkdocstrings." 
optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ - {file = "mkdocstrings_python-1.16.12-py3-none-any.whl", hash = "sha256:22ded3a63b3d823d57457a70ff9860d5a4de9e8b1e482876fc9baabaf6f5f374"}, - {file = "mkdocstrings_python-1.16.12.tar.gz", hash = "sha256:9b9eaa066e0024342d433e332a41095c4e429937024945fea511afe58f63175d"}, + {file = "mkdocstrings_python-1.16.1-py3-none-any.whl", hash = "sha256:b88ff6fc6a293cee9cb42313f1cba37a2c5cdf37bcc60b241ec7ab66b5d41b58"}, + {file = "mkdocstrings_python-1.16.1.tar.gz", hash = "sha256:d7152d17da74d3616a0f17df5d2da771ecf7340518c158650e5a64a0a95973f4"}, ] [package.dependencies] -griffe = ">=1.6.2" -mkdocs-autorefs = ">=1.4" -mkdocstrings = ">=0.28.3" +griffe = ">=0.49" +mkdocs-autorefs = ">=1.2" +mkdocstrings = ">=0.28" typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [[package]] name = "mmh3" -version = "5.2.0" +version = "5.1.0" description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." optional = false python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "mmh3-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:81c504ad11c588c8629536b032940f2a359dda3b6cbfd4ad8f74cb24dcd1b0bc"}, - {file = "mmh3-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b898cecff57442724a0f52bf42c2de42de63083a91008fb452887e372f9c328"}, - {file = "mmh3-5.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be1374df449465c9f2500e62eee73a39db62152a8bdfbe12ec5b5c1cd451344d"}, - {file = "mmh3-5.2.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b0d753ad566c721faa33db7e2e0eddd74b224cdd3eaf8481d76c926603c7a00e"}, - {file = "mmh3-5.2.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dfbead5575f6470c17e955b94f92d62a03dfc3d07f2e6f817d9b93dc211a1515"}, - {file = "mmh3-5.2.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7434a27754049144539d2099a6d2da5d88b8bdeedf935180bf42ad59b3607aa3"}, - {file = "mmh3-5.2.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cadc16e8ea64b5d9a47363013e2bea469e121e6e7cb416a7593aeb24f2ad122e"}, - {file = "mmh3-5.2.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d765058da196f68dc721116cab335e696e87e76720e6ef8ee5a24801af65e63d"}, - {file = "mmh3-5.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8b0c53fe0994beade1ad7c0f13bd6fec980a0664bfbe5a6a7d64500b9ab76772"}, - {file = "mmh3-5.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:49037d417419863b222ae47ee562b2de9c3416add0a45c8d7f4e864be8dc4f89"}, - {file = "mmh3-5.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:6ecb4e750d712abde046858ee6992b65c93f1f71b397fce7975c3860c07365d2"}, - {file = "mmh3-5.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:382a6bb3f8c6532ea084e7acc5be6ae0c6effa529240836d59352398f002e3fc"}, - {file = "mmh3-5.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7733ec52296fc1ba22e9b90a245c821adbb943e98c91d8a330a2254612726106"}, - {file = "mmh3-5.2.0-cp310-cp310-win32.whl", hash = "sha256:127c95336f2a98c51e7682341ab7cb0be3adb9df0819ab8505a726ed1801876d"}, - {file = "mmh3-5.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:419005f84ba1cab47a77465a2a843562dadadd6671b8758bf179d82a15ca63eb"}, - {file = "mmh3-5.2.0-cp310-cp310-win_arm64.whl", hash = 
"sha256:d22c9dcafed659fadc605538946c041722b6d1104fe619dbf5cc73b3c8a0ded8"}, - {file = "mmh3-5.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7901c893e704ee3c65f92d39b951f8f34ccf8e8566768c58103fb10e55afb8c1"}, - {file = "mmh3-5.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5f5536b1cbfa72318ab3bfc8a8188b949260baed186b75f0abc75b95d8c051"}, - {file = "mmh3-5.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cedac4f4054b8f7859e5aed41aaa31ad03fce6851901a7fdc2af0275ac533c10"}, - {file = "mmh3-5.2.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eb756caf8975882630ce4e9fbbeb9d3401242a72528230422c9ab3a0d278e60c"}, - {file = "mmh3-5.2.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:097e13c8b8a66c5753c6968b7640faefe85d8e38992703c1f666eda6ef4c3762"}, - {file = "mmh3-5.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7c0c7845566b9686480e6a7e9044db4afb60038d5fabd19227443f0104eeee4"}, - {file = "mmh3-5.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:61ac226af521a572700f863d6ecddc6ece97220ce7174e311948ff8c8919a363"}, - {file = "mmh3-5.2.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:582f9dbeefe15c32a5fa528b79b088b599a1dfe290a4436351c6090f90ddebb8"}, - {file = "mmh3-5.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2ebfc46b39168ab1cd44670a32ea5489bcbc74a25795c61b6d888c5c2cf654ed"}, - {file = "mmh3-5.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1556e31e4bd0ac0c17eaf220be17a09c171d7396919c3794274cb3415a9d3646"}, - {file = "mmh3-5.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:81df0dae22cd0da87f1c978602750f33d17fb3d21fb0f326c89dc89834fea79b"}, - {file = "mmh3-5.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:eba01ec3bd4a49b9ac5ca2bc6a73ff5f3af53374b8556fcc2966dd2af9eb7779"}, - {file = "mmh3-5.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9a011469b47b752e7d20de296bb34591cdfcbe76c99c2e863ceaa2aa61113d2"}, - {file = "mmh3-5.2.0-cp311-cp311-win32.whl", hash = "sha256:bc44fc2b886243d7c0d8daeb37864e16f232e5b56aaec27cc781d848264cfd28"}, - {file = "mmh3-5.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ebf241072cf2777a492d0e09252f8cc2b3edd07dfdb9404b9757bffeb4f2cee"}, - {file = "mmh3-5.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:b5f317a727bba0e633a12e71228bc6a4acb4f471a98b1c003163b917311ea9a9"}, - {file = "mmh3-5.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:384eda9361a7bf83a85e09447e1feafe081034af9dd428893701b959230d84be"}, - {file = "mmh3-5.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2c9da0d568569cc87315cb063486d761e38458b8ad513fedd3dc9263e1b81bcd"}, - {file = "mmh3-5.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86d1be5d63232e6eb93c50881aea55ff06eb86d8e08f9b5417c8c9b10db9db96"}, - {file = "mmh3-5.2.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bf7bee43e17e81671c447e9c83499f53d99bf440bc6d9dc26a841e21acfbe094"}, - {file = "mmh3-5.2.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7aa18cdb58983ee660c9c400b46272e14fa253c675ed963d3812487f8ca42037"}, - {file = "mmh3-5.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9d032488fcec32d22be6542d1a836f00247f40f320844dbb361393b5b22773"}, - {file = 
"mmh3-5.2.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1861fb6b1d0453ed7293200139c0a9011eeb1376632e048e3766945b13313c5"}, - {file = "mmh3-5.2.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:99bb6a4d809aa4e528ddfe2c85dd5239b78b9dd14be62cca0329db78505e7b50"}, - {file = "mmh3-5.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1f8d8b627799f4e2fcc7c034fed8f5f24dc7724ff52f69838a3d6d15f1ad4765"}, - {file = "mmh3-5.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b5995088dd7023d2d9f310a0c67de5a2b2e06a570ecfd00f9ff4ab94a67cde43"}, - {file = "mmh3-5.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1a5f4d2e59d6bba8ef01b013c472741835ad961e7c28f50c82b27c57748744a4"}, - {file = "mmh3-5.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fd6e6c3d90660d085f7e73710eab6f5545d4854b81b0135a3526e797009dbda3"}, - {file = "mmh3-5.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c4a2f3d83879e3de2eb8cbf562e71563a8ed15ee9b9c2e77ca5d9f73072ac15c"}, - {file = "mmh3-5.2.0-cp312-cp312-win32.whl", hash = "sha256:2421b9d665a0b1ad724ec7332fb5a98d075f50bc51a6ff854f3a1882bd650d49"}, - {file = "mmh3-5.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d80005b7634a3a2220f81fbeb94775ebd12794623bb2e1451701ea732b4aa3"}, - {file = "mmh3-5.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:3d6bfd9662a20c054bc216f861fa330c2dac7c81e7fb8307b5e32ab5b9b4d2e0"}, - {file = "mmh3-5.2.0-cp313-cp313-android_21_arm64_v8a.whl", hash = "sha256:e79c00eba78f7258e5b354eccd4d7907d60317ced924ea4a5f2e9d83f5453065"}, - {file = "mmh3-5.2.0-cp313-cp313-android_21_x86_64.whl", hash = "sha256:956127e663d05edbeec54df38885d943dfa27406594c411139690485128525de"}, - {file = "mmh3-5.2.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:c3dca4cb5b946ee91b3d6bb700d137b1cd85c20827f89fdf9c16258253489044"}, - {file = "mmh3-5.2.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:e651e17bfde5840e9e4174b01e9e080ce49277b70d424308b36a7969d0d1af73"}, - {file = "mmh3-5.2.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:9f64bf06f4bf623325fda3a6d02d36cd69199b9ace99b04bb2d7fd9f89688504"}, - {file = "mmh3-5.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ddc63328889bcaee77b743309e5c7d2d52cee0d7d577837c91b6e7cc9e755e0b"}, - {file = "mmh3-5.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bb0fdc451fb6d86d81ab8f23d881b8d6e37fc373a2deae1c02d27002d2ad7a05"}, - {file = "mmh3-5.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b29044e1ffdb84fe164d0a7ea05c7316afea93c00f8ed9449cf357c36fc4f814"}, - {file = "mmh3-5.2.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:58981d6ea9646dbbf9e59a30890cbf9f610df0e4a57dbfe09215116fd90b0093"}, - {file = "mmh3-5.2.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7e5634565367b6d98dc4aa2983703526ef556b3688ba3065edb4b9b90ede1c54"}, - {file = "mmh3-5.2.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0271ac12415afd3171ab9a3c7cbfc71dee2c68760a7dc9d05bf8ed6ddfa3a7a"}, - {file = "mmh3-5.2.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:45b590e31bc552c6f8e2150ff1ad0c28dd151e9f87589e7eaf508fbdd8e8e908"}, - {file = "mmh3-5.2.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:bdde97310d59604f2a9119322f61b31546748499a21b44f6715e8ced9308a6c5"}, - {file = "mmh3-5.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fc9c5f280438cf1c1a8f9abb87dc8ce9630a964120cfb5dd50d1e7ce79690c7a"}, - {file = "mmh3-5.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c903e71fd8debb35ad2a4184c1316b3cb22f64ce517b4e6747f25b0a34e41266"}, - {file = "mmh3-5.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:eed4bba7ff8a0d37106ba931ab03bdd3915fbb025bcf4e1f0aa02bc8114960c5"}, - {file = "mmh3-5.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1fdb36b940e9261aff0b5177c5b74a36936b902f473180f6c15bde26143681a9"}, - {file = "mmh3-5.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7303aab41e97adcf010a09efd8f1403e719e59b7705d5e3cfed3dd7571589290"}, - {file = "mmh3-5.2.0-cp313-cp313-win32.whl", hash = "sha256:03e08c6ebaf666ec1e3d6ea657a2d363bb01effd1a9acfe41f9197decaef0051"}, - {file = "mmh3-5.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:7fddccd4113e7b736706e17a239a696332360cbaddf25ae75b57ba1acce65081"}, - {file = "mmh3-5.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:fa0c966ee727aad5406d516375593c5f058c766b21236ab8985693934bb5085b"}, - {file = "mmh3-5.2.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:e5015f0bb6eb50008bed2d4b1ce0f2a294698a926111e4bb202c0987b4f89078"}, - {file = "mmh3-5.2.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:e0f3ed828d709f5b82d8bfe14f8856120718ec4bd44a5b26102c3030a1e12501"}, - {file = "mmh3-5.2.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:f35727c5118aba95f0397e18a1a5b8405425581bfe53e821f0fb444cbdc2bc9b"}, - {file = "mmh3-5.2.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bc244802ccab5220008cb712ca1508cb6a12f0eb64ad62997156410579a1770"}, - {file = "mmh3-5.2.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ff3d50dc3fe8a98059f99b445dfb62792b5d006c5e0b8f03c6de2813b8376110"}, - {file = "mmh3-5.2.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:37a358cc881fe796e099c1db6ce07ff757f088827b4e8467ac52b7a7ffdca647"}, - {file = "mmh3-5.2.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b9a87025121d1c448f24f27ff53a5fe7b6ef980574b4a4f11acaabe702420d63"}, - {file = "mmh3-5.2.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ba55d6ca32eeef8b2625e1e4bfc3b3db52bc63014bd7e5df8cc11bf2b036b12"}, - {file = "mmh3-5.2.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9ff37ba9f15637e424c2ab57a1a590c52897c845b768e4e0a4958084ec87f22"}, - {file = "mmh3-5.2.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a094319ec0db52a04af9fdc391b4d39a1bc72bc8424b47c4411afb05413a44b5"}, - {file = "mmh3-5.2.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c5584061fd3da584659b13587f26c6cad25a096246a481636d64375d0c1f6c07"}, - {file = "mmh3-5.2.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecbfc0437ddfdced5e7822d1ce4855c9c64f46819d0fdc4482c53f56c707b935"}, - {file = "mmh3-5.2.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:7b986d506a8e8ea345791897ba5d8ba0d9d8820cd4fc3e52dbe6de19388de2e7"}, - {file = "mmh3-5.2.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:38d899a156549da8ef6a9f1d6f7ef231228d29f8f69bce2ee12f5fba6d6fd7c5"}, - {file = "mmh3-5.2.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = 
"sha256:d86651fa45799530885ba4dab3d21144486ed15285e8784181a0ab37a4552384"}, - {file = "mmh3-5.2.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c463d7c1c4cfc9d751efeaadd936bbba07b5b0ed81a012b3a9f5a12f0872bd6e"}, - {file = "mmh3-5.2.0-cp314-cp314-win32.whl", hash = "sha256:bb4fe46bdc6104fbc28db7a6bacb115ee6368ff993366bbd8a2a7f0076e6f0c0"}, - {file = "mmh3-5.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:7c7f0b342fd06044bedd0b6e72177ddc0076f54fd89ee239447f8b271d919d9b"}, - {file = "mmh3-5.2.0-cp314-cp314-win_arm64.whl", hash = "sha256:3193752fc05ea72366c2b63ff24b9a190f422e32d75fdeae71087c08fff26115"}, - {file = "mmh3-5.2.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:69fc339d7202bea69ef9bd7c39bfdf9fdabc8e6822a01eba62fb43233c1b3932"}, - {file = "mmh3-5.2.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:12da42c0a55c9d86ab566395324213c319c73ecb0c239fad4726324212b9441c"}, - {file = "mmh3-5.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f7f9034c7cf05ddfaac8d7a2e63a3c97a840d4615d0a0e65ba8bdf6f8576e3be"}, - {file = "mmh3-5.2.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:11730eeb16dfcf9674fdea9bb6b8e6dd9b40813b7eb839bc35113649eef38aeb"}, - {file = "mmh3-5.2.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:932a6eec1d2e2c3c9e630d10f7128d80e70e2d47fe6b8c7ea5e1afbd98733e65"}, - {file = "mmh3-5.2.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ca975c51c5028947bbcfc24966517aac06a01d6c921e30f7c5383c195f87991"}, - {file = "mmh3-5.2.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5b0b58215befe0f0e120b828f7645e97719bbba9f23b69e268ed0ac7adde8645"}, - {file = "mmh3-5.2.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29c2b9ce61886809d0492a274a5a53047742dea0f703f9c4d5d223c3ea6377d3"}, - {file = "mmh3-5.2.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:a367d4741ac0103f8198c82f429bccb9359f543ca542b06a51f4f0332e8de279"}, - {file = "mmh3-5.2.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:5a5dba98e514fb26241868f6eb90a7f7ca0e039aed779342965ce24ea32ba513"}, - {file = "mmh3-5.2.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:941603bfd75a46023807511c1ac2f1b0f39cccc393c15039969806063b27e6db"}, - {file = "mmh3-5.2.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:132dd943451a7c7546978863d2f5a64977928410782e1a87d583cb60eb89e667"}, - {file = "mmh3-5.2.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f698733a8a494466432d611a8f0d1e026f5286dee051beea4b3c3146817e35d5"}, - {file = "mmh3-5.2.0-cp314-cp314t-win32.whl", hash = "sha256:6d541038b3fc360ec538fc116de87462627944765a6750308118f8b509a8eec7"}, - {file = "mmh3-5.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:e912b19cf2378f2967d0c08e86ff4c6c360129887f678e27e4dde970d21b3f4d"}, - {file = "mmh3-5.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:e7884931fe5e788163e7b3c511614130c2c59feffdc21112290a194487efb2e9"}, - {file = "mmh3-5.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c6041fd9d5fb5fcac57d5c80f521a36b74aea06b8566431c63e4ffc49aced51"}, - {file = "mmh3-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:58477cf9ef16664d1ce2b038f87d2dc96d70fe50733a34a7f07da6c9a5e3538c"}, - {file = "mmh3-5.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be7d3dca9358e01dab1bad881fb2b4e8730cec58d36dd44482bc068bfcd3bc65"}, - {file = 
"mmh3-5.2.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:931d47e08c9c8a67bf75d82f0ada8399eac18b03388818b62bfa42882d571d72"}, - {file = "mmh3-5.2.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dd966df3489ec13848d6c6303429bbace94a153f43d1ae2a55115fd36fd5ca5d"}, - {file = "mmh3-5.2.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c677d78887244bf3095020b73c42b505b700f801c690f8eaa90ad12d3179612f"}, - {file = "mmh3-5.2.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63830f846797187c5d3e2dae50f0848fdc86032f5bfdc58ae352f02f857e9025"}, - {file = "mmh3-5.2.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c3f563e8901960e2eaa64c8e8821895818acabeb41c96f2efbb936f65dbe486c"}, - {file = "mmh3-5.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96f1e1ac44cbb42bcc406e509f70c9af42c594e72ccc7b1257f97554204445f0"}, - {file = "mmh3-5.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7bbb0df897944b5ec830f3ad883e32c5a7375370a521565f5fe24443bfb2c4f7"}, - {file = "mmh3-5.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:1fae471339ae1b9c641f19cf46dfe6ffd7f64b1fba7c4333b99fa3dd7f21ae0a"}, - {file = "mmh3-5.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:aa6e5d31fdc5ed9e3e95f9873508615a778fe9b523d52c17fc770a3eb39ab6e4"}, - {file = "mmh3-5.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:746a5ee71c6d1103d9b560fa147881b5e68fd35da56e54e03d5acefad0e7c055"}, - {file = "mmh3-5.2.0-cp39-cp39-win32.whl", hash = "sha256:10983c10f5c77683bd845751905ba535ec47409874acc759d5ce3ff7ef34398a"}, - {file = "mmh3-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fdfd3fb739f4e22746e13ad7ba0c6eedf5f454b18d11249724a388868e308ee4"}, - {file = "mmh3-5.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:33576136c06b46a7046b6d83a3d75fbca7d25f84cec743f1ae156362608dc6d2"}, - {file = "mmh3-5.2.0.tar.gz", hash = "sha256:1efc8fec8478e9243a78bb993422cf79f8ff85cb4cf6b79647480a31e0d950a8"}, +files = [ + {file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:eaf4ac5c6ee18ca9232238364d7f2a213278ae5ca97897cafaa123fcc7bb8bec"}, + {file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48f9aa8ccb9ad1d577a16104834ac44ff640d8de8c0caed09a2300df7ce8460a"}, + {file = "mmh3-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d4ba8cac21e1f2d4e436ce03a82a7f87cda80378691f760e9ea55045ec480a3d"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69281c281cb01994f054d862a6bb02a2e7acfe64917795c58934b0872b9ece4"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d05ed3962312fbda2a1589b97359d2467f677166952f6bd410d8c916a55febf"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78ae6a03f4cff4aa92ddd690611168856f8c33a141bd3e5a1e0a85521dc21ea0"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f983535b39795d9fb7336438faae117424c6798f763d67c6624f6caf2c4c01"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d46fdd80d4c7ecadd9faa6181e92ccc6fe91c50991c9af0e371fdf8b8a7a6150"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:0f16e976af7365ea3b5c425124b2a7f0147eed97fdbb36d99857f173c8d8e096"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6fa97f7d1e1f74ad1565127229d510f3fd65d931fdedd707c1e15100bc9e5ebb"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4052fa4a8561bd62648e9eb993c8f3af3bdedadf3d9687aa4770d10e3709a80c"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3f0e8ae9f961037f812afe3cce7da57abf734285961fffbeff9a4c011b737732"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99297f207db967814f1f02135bb7fe7628b9eacb046134a34e1015b26b06edce"}, + {file = "mmh3-5.1.0-cp310-cp310-win32.whl", hash = "sha256:2e6c8dc3631a5e22007fbdb55e993b2dbce7985c14b25b572dd78403c2e79182"}, + {file = "mmh3-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:e4e8c7ad5a4dddcfde35fd28ef96744c1ee0f9d9570108aa5f7e77cf9cfdf0bf"}, + {file = "mmh3-5.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:45da549269883208912868a07d0364e1418d8292c4259ca11699ba1b2475bd26"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b529dcda3f951ff363a51d5866bc6d63cf57f1e73e8961f864ae5010647079d"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db1079b3ace965e562cdfc95847312f9273eb2ad3ebea983435c8423e06acd7"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:22d31e3a0ff89b8eb3b826d6fc8e19532998b2aa6b9143698043a1268da413e1"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2139bfbd354cd6cb0afed51c4b504f29bcd687a3b1460b7e89498329cc28a894"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c8105c6a435bc2cd6ea2ef59558ab1a2976fd4a4437026f562856d08996673a"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57730067174a7f36fcd6ce012fe359bd5510fdaa5fe067bc94ed03e65dafb769"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde80eb196d7fdc765a318604ded74a4378f02c5b46c17aa48a27d742edaded2"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c8eddcb441abddeb419c16c56fd74b3e2df9e57f7aa2903221996718435c7a"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:99e07e4acafbccc7a28c076a847fb060ffc1406036bc2005acb1b2af620e53c3"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e25ba5b530e9a7d65f41a08d48f4b3fedc1e89c26486361166a5544aa4cad33"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bb9bf7475b4d99156ce2f0cf277c061a17560c8c10199c910a680869a278ddc7"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a1b0878dd281ea3003368ab53ff6f568e175f1b39f281df1da319e58a19c23a"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:25f565093ac8b8aefe0f61f8f95c9a9d11dd69e6a9e9832ff0d293511bc36258"}, + {file = "mmh3-5.1.0-cp311-cp311-win32.whl", hash = "sha256:1e3554d8792387eac73c99c6eaea0b3f884e7130eb67986e11c403e4f9b6d372"}, + {file = "mmh3-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ad777a48197882492af50bf3098085424993ce850bdda406a358b6ab74be759"}, + {file = "mmh3-5.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f29dc4efd99bdd29fe85ed6c81915b17b2ef2cf853abf7213a48ac6fb3eaabe1"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_10_13_universal2.whl", 
hash = "sha256:45712987367cb9235026e3cbf4334670522a97751abfd00b5bc8bfa022c3311d"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b1020735eb35086ab24affbea59bb9082f7f6a0ad517cb89f0fc14f16cea4dae"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:babf2a78ce5513d120c358722a2e3aa7762d6071cd10cede026f8b32452be322"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4f47f58cd5cbef968c84a7c1ddc192fef0a36b48b0b8a3cb67354531aa33b00"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2044a601c113c981f2c1e14fa33adc9b826c9017034fe193e9eb49a6882dbb06"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94d999c9f2eb2da44d7c2826d3fbffdbbbbcde8488d353fee7c848ecc42b968"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a015dcb24fa0c7a78f88e9419ac74f5001c1ed6a92e70fd1803f74afb26a4c83"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457da019c491a2d20e2022c7d4ce723675e4c081d9efc3b4d8b9f28a5ea789bd"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71408579a570193a4ac9c77344d68ddefa440b00468a0b566dcc2ba282a9c559"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8b3a04bc214a6e16c81f02f855e285c6df274a2084787eeafaa45f2fbdef1b63"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:832dae26a35514f6d3c1e267fa48e8de3c7b978afdafa0529c808ad72e13ada3"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bf658a61fc92ef8a48945ebb1076ef4ad74269e353fffcb642dfa0890b13673b"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3313577453582b03383731b66447cdcdd28a68f78df28f10d275d7d19010c1df"}, + {file = "mmh3-5.1.0-cp312-cp312-win32.whl", hash = "sha256:1d6508504c531ab86c4424b5a5ff07c1132d063863339cf92f6657ff7a580f76"}, + {file = "mmh3-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:aa75981fcdf3f21759d94f2c81b6a6e04a49dfbcdad88b152ba49b8e20544776"}, + {file = "mmh3-5.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4c1a76808dfea47f7407a0b07aaff9087447ef6280716fd0783409b3088bb3c"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a523899ca29cfb8a5239618474a435f3d892b22004b91779fcb83504c0d5b8c"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:17cef2c3a6ca2391ca7171a35ed574b5dab8398163129a3e3a4c05ab85a4ff40"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:52e12895b30110f3d89dae59a888683cc886ed0472dd2eca77497edef6161997"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d6719045cda75c3f40397fc24ab67b18e0cb8f69d3429ab4c39763c4c608dd"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19fa07d303a91f8858982c37e6939834cb11893cb3ff20e6ee6fa2a7563826a"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31b47a620d622fbde8ca1ca0435c5d25de0ac57ab507209245e918128e38e676"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00f810647c22c179b6821079f7aa306d51953ac893587ee09cf1afb35adf87cb"}, + {file = 
"mmh3-5.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6128b610b577eed1e89ac7177ab0c33d06ade2aba93f5c89306032306b5f1c6"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1e550a45d2ff87a1c11b42015107f1778c93f4c6f8e731bf1b8fa770321b8cc4"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:785ae09276342f79fd8092633e2d52c0f7c44d56e8cfda8274ccc9b76612dba2"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0f4be3703a867ef976434afd3661a33884abe73ceb4ee436cac49d3b4c2aaa7b"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e513983830c4ff1f205ab97152a0050cf7164f1b4783d702256d39c637b9d107"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9135c300535c828c0bae311b659f33a31c941572eae278568d1a953c4a57b59"}, + {file = "mmh3-5.1.0-cp313-cp313-win32.whl", hash = "sha256:c65dbd12885a5598b70140d24de5839551af5a99b29f9804bb2484b29ef07692"}, + {file = "mmh3-5.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:10db7765201fc65003fa998faa067417ef6283eb5f9bba8f323c48fd9c33e91f"}, + {file = "mmh3-5.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:b22fe2e54be81f6c07dcb36b96fa250fb72effe08aa52fbb83eade6e1e2d5fd7"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:166b67749a1d8c93b06f5e90576f1ba838a65c8e79f28ffd9dfafba7c7d0a084"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adba83c7ba5cc8ea201ee1e235f8413a68e7f7b8a657d582cc6c6c9d73f2830e"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a61f434736106804eb0b1612d503c4e6eb22ba31b16e6a2f987473de4226fa55"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba9ce59816b30866093f048b3312c2204ff59806d3a02adee71ff7bd22b87554"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd51597bef1e503363b05cb579db09269e6e6c39d419486626b255048daf545b"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d51a1ed642d3fb37b8f4cab966811c52eb246c3e1740985f701ef5ad4cdd2145"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:709bfe81c53bf8a3609efcbd65c72305ade60944f66138f697eefc1a86b6e356"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e01a9b0092b6f82e861137c8e9bb9899375125b24012eb5219e61708be320032"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:27e46a2c13c9a805e03c9ec7de0ca8e096794688ab2125bdce4229daf60c4a56"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5766299c1d26f6bfd0a638e070bd17dbd98d4ccb067d64db3745bf178e700ef0"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7785205e3e4443fdcbb73766798c7647f94c2f538b90f666688f3e757546069e"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8e574fbd39afb433b3ab95683b1b4bf18313dc46456fc9daaddc2693c19ca565"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1b6727a5a20e32cbf605743749f3862abe5f5e097cbf2afc7be5aafd32a549ae"}, + {file = "mmh3-5.1.0-cp39-cp39-win32.whl", hash = "sha256:d6eaa711d4b9220fe5252032a44bf68e5dcfb7b21745a96efc9e769b0dd57ec2"}, + {file = "mmh3-5.1.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:49d444913f6c02980e5241a53fe9af2338f2043d6ce5b6f5ea7d302c52c604ac"}, + {file = "mmh3-5.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:0daaeaedd78773b70378f2413c7d6b10239a75d955d30d54f460fb25d599942d"}, + {file = "mmh3-5.1.0.tar.gz", hash = "sha256:136e1e670500f177f49ec106a4ebf0adf20d18d96990cc36ea492c651d2b406c"}, ] [package.extras] -benchmark = ["pymmh3 (==0.0.5)", "pyperf (==2.9.0)", "xxhash (==3.5.0)"] -docs = ["myst-parser (==4.0.1)", "shibuya (==2025.7.24)", "sphinx (==8.2.3)", "sphinx-copybutton (==0.5.2)"] -lint = ["black (==25.1.0)", "clang-format (==20.1.8)", "isort (==6.0.1)", "pylint (==3.3.7)"] -plot = ["matplotlib (==3.10.3)", "pandas (==2.3.1)"] -test = ["pytest (==8.4.1)", "pytest-sugar (==1.0.0)"] -type = ["mypy (==1.17.0)"] +benchmark = ["pymmh3 (==0.0.5)", "pyperf (==2.8.1)", "xxhash (==3.5.0)"] +docs = ["myst-parser (==4.0.0)", "shibuya (==2024.12.21)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)"] +lint = ["black (==24.10.0)", "clang-format (==19.1.7)", "isort (==5.13.2)", "pylint (==3.3.3)"] +plot = ["matplotlib (==3.10.0)", "pandas (==2.2.3)"] +test = ["pytest (==8.3.4)", "pytest-sugar (==1.0.0)"] +type = ["mypy (==1.14.1)"] [[package]] name = "moto" -version = "5.1.10" +version = "5.0.28" description = "A library that allows you to easily mock out tests based on AWS infrastructure" optional = false -python-versions = ">=3.9" -groups = ["dev"] +python-versions = ">=3.8" files = [ - {file = "moto-5.1.10-py3-none-any.whl", hash = "sha256:9ec1a21a924f97470af225b2bfa854fe46c1ad30fb44655eba458206dedf28b5"}, - {file = "moto-5.1.10.tar.gz", hash = "sha256:d6bdc8f82a1e503502927cc0a3da22014f836094d0bf399bb0f695754ae6c7a6"}, + {file = "moto-5.0.28-py3-none-any.whl", hash = "sha256:2dfbea1afe3b593e13192059a1a7fc4b3cf7fdf92e432070c22346efa45aa0f0"}, + {file = "moto-5.0.28.tar.gz", hash = "sha256:4d3437693411ec943c13c77de5b0b520c4b0a9ac850fead4ba2a54709e086e8b"}, ] [package.dependencies] antlr4-python3-runtime = {version = "*", optional = true, markers = "extra == \"server\""} aws-xray-sdk = {version = ">=0.93,<0.96 || >0.96", optional = true, markers = "extra == \"server\""} boto3 = ">=1.9.201" -botocore = ">=1.20.88,<1.35.45 || >1.35.45,<1.35.46 || >1.35.46" +botocore = ">=1.14.0,<1.35.45 || >1.35.45,<1.35.46 || >1.35.46" cfn-lint = {version = ">=0.40.0", optional = true, markers = "extra == \"server\""} cryptography = ">=35.0.0" docker = {version = ">=3.0.0", optional = true, markers = "extra == \"server\""} @@ -3062,7 +2619,7 @@ flask-cors = {version = "*", optional = true, markers = "extra == \"server\""} graphql-core = {version = "*", optional = true, markers = "extra == \"server\""} Jinja2 = ">=2.10.1" joserfc = {version = ">=0.9.0", optional = true, markers = "extra == \"server\""} -jsonpath_ng = {version = "*", optional = true, markers = "extra == \"server\""} +jsonpath-ng = {version = "*", optional = true, markers = "extra == \"server\""} openapi-spec-validator = {version = ">=0.5.0", optional = true, markers = "extra == \"server\""} py-partiql-parser = {version = "0.6.1", optional = true, markers = "extra == \"server\""} pyparsing = {version = ">=3.0.7", optional = true, markers = "extra == \"server\""} @@ -3075,7 +2632,7 @@ werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "jsonschema", "multipart", "openapi-spec-validator (>=0.5.0)", 
"py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath-ng", "jsonschema", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] appsync = ["graphql-core"] @@ -3085,16 +2642,16 @@ cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (> cognitoidp = ["joserfc (>=0.9.0)"] dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.1)"] dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.1)"] -events = ["jsonpath_ng"] +events = ["jsonpath-ng"] glue = ["pyparsing (>=3.0.7)"] -proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] quicksight = ["jsonschema"] resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)"] s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.6.1)"] s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.6.1)"] -server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsonpath-ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.1)", "pyparsing (>=3.0.7)", "setuptools"] ssm = ["PyYAML (>=5.1)"] -stepfunctions = ["antlr4-python3-runtime", "jsonpath_ng"] +stepfunctions = ["antlr4-python3-runtime", "jsonpath-ng"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] [[package]] @@ -3103,7 +2660,6 @@ version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, @@ -3112,268 +2668,229 @@ files = [ [package.extras] develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""] +gmpy = ["gmpy2 (>=2.1.0a4)"] tests = ["pytest (>=4.6)"] [[package]] name = "msal" -version = "1.32.3" +version = "1.31.1" description = "The Microsoft 
Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"adlfs\"" files = [ - {file = "msal-1.32.3-py3-none-any.whl", hash = "sha256:b2798db57760b1961b142f027ffb7c8169536bf77316e99a0df5c4aaebb11569"}, - {file = "msal-1.32.3.tar.gz", hash = "sha256:5eea038689c78a5a70ca8ecbe1245458b55a857bd096efb6989c69ba15985d35"}, + {file = "msal-1.31.1-py3-none-any.whl", hash = "sha256:29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17"}, + {file = "msal-1.31.1.tar.gz", hash = "sha256:11b5e6a3f802ffd3a72107203e20c4eac6ef53401961b880af2835b723d80578"}, ] [package.dependencies] -cryptography = ">=2.5,<47" +cryptography = ">=2.5,<46" PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} requests = ">=2.0.0,<3" [package.extras] -broker = ["pymsalruntime (>=0.14,<0.18) ; python_version >= \"3.6\" and platform_system == \"Windows\"", "pymsalruntime (>=0.17,<0.18) ; python_version >= \"3.8\" and platform_system == \"Darwin\""] +broker = ["pymsalruntime (>=0.14,<0.18)", "pymsalruntime (>=0.17,<0.18)"] [[package]] name = "msal-extensions" -version = "1.3.1" +version = "1.2.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"adlfs\"" +python-versions = ">=3.7" files = [ - {file = "msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca"}, - {file = "msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4"}, + {file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"}, + {file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"}, ] [package.dependencies] msal = ">=1.29,<2" - -[package.extras] -portalocker = ["portalocker (>=1.4,<4)"] +portalocker = ">=1.4,<3" [[package]] name = "msgpack" -version = "1.1.1" +version = "1.1.0" description = "MessagePack serializer" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"ray\"" -files = [ - {file = "msgpack-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:353b6fc0c36fde68b661a12949d7d49f8f51ff5fa019c1e47c87c4ff34b080ed"}, - {file = "msgpack-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:79c408fcf76a958491b4e3b103d1c417044544b68e96d06432a189b43d1215c8"}, - {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78426096939c2c7482bf31ef15ca219a9e24460289c00dd0b94411040bb73ad2"}, - {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b17ba27727a36cb73aabacaa44b13090feb88a01d012c0f4be70c00f75048b4"}, - {file = "msgpack-1.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a17ac1ea6ec3c7687d70201cfda3b1e8061466f28f686c24f627cae4ea8efd0"}, - {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:88d1e966c9235c1d4e2afac21ca83933ba59537e2e2727a999bf3f515ca2af26"}, - {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f6d58656842e1b2ddbe07f43f56b10a60f2ba5826164910968f5933e5178af75"}, - {file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96decdfc4adcbc087f5ea7ebdcfd3dee9a13358cae6e81d54be962efc38f6338"}, - {file = "msgpack-1.1.1-cp310-cp310-win32.whl", hash = "sha256:6640fd979ca9a212e4bcdf6eb74051ade2c690b862b679bfcb60ae46e6dc4bfd"}, - {file = "msgpack-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:8b65b53204fe1bd037c40c4148d00ef918eb2108d24c9aaa20bc31f9810ce0a8"}, - {file = "msgpack-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:71ef05c1726884e44f8b1d1773604ab5d4d17729d8491403a705e649116c9558"}, - {file = "msgpack-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:36043272c6aede309d29d56851f8841ba907a1a3d04435e43e8a19928e243c1d"}, - {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a32747b1b39c3ac27d0670122b57e6e57f28eefb725e0b625618d1b59bf9d1e0"}, - {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a8b10fdb84a43e50d38057b06901ec9da52baac6983d3f709d8507f3889d43f"}, - {file = "msgpack-1.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0c325c3f485dc54ec298d8b024e134acf07c10d494ffa24373bea729acf704"}, - {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:88daaf7d146e48ec71212ce21109b66e06a98e5e44dca47d853cbfe171d6c8d2"}, - {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8b55ea20dc59b181d3f47103f113e6f28a5e1c89fd5b67b9140edb442ab67f2"}, - {file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a28e8072ae9779f20427af07f53bbb8b4aa81151054e882aee333b158da8752"}, - {file = "msgpack-1.1.1-cp311-cp311-win32.whl", hash = "sha256:7da8831f9a0fdb526621ba09a281fadc58ea12701bc709e7b8cbc362feabc295"}, - {file = "msgpack-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fd1b58e1431008a57247d6e7cc4faa41c3607e8e7d4aaf81f7c29ea013cb458"}, - {file = "msgpack-1.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae497b11f4c21558d95de9f64fff7053544f4d1a17731c866143ed6bb4591238"}, - {file = "msgpack-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33be9ab121df9b6b461ff91baac6f2731f83d9b27ed948c5b9d1978ae28bf157"}, - {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f64ae8fe7ffba251fecb8408540c34ee9df1c26674c50c4544d72dbf792e5ce"}, - {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a494554874691720ba5891c9b0b39474ba43ffb1aaf32a5dac874effb1619e1a"}, - {file = "msgpack-1.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb643284ab0ed26f6957d969fe0dd8bb17beb567beb8998140b5e38a90974f6c"}, - {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d275a9e3c81b1093c060c3837e580c37f47c51eca031f7b5fb76f7b8470f5f9b"}, - {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fd6b577e4541676e0cc9ddc1709d25014d3ad9a66caa19962c4f5de30fc09ef"}, - {file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb29aaa613c0a1c40d1af111abf025f1732cab333f96f285d6a93b934738a68a"}, - {file = "msgpack-1.1.1-cp312-cp312-win32.whl", hash = 
"sha256:870b9a626280c86cff9c576ec0d9cbcc54a1e5ebda9cd26dab12baf41fee218c"}, - {file = "msgpack-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:5692095123007180dca3e788bb4c399cc26626da51629a31d40207cb262e67f4"}, - {file = "msgpack-1.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3765afa6bd4832fc11c3749be4ba4b69a0e8d7b728f78e68120a157a4c5d41f0"}, - {file = "msgpack-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8ddb2bcfd1a8b9e431c8d6f4f7db0773084e107730ecf3472f1dfe9ad583f3d9"}, - {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:196a736f0526a03653d829d7d4c5500a97eea3648aebfd4b6743875f28aa2af8"}, - {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d592d06e3cc2f537ceeeb23d38799c6ad83255289bb84c2e5792e5a8dea268a"}, - {file = "msgpack-1.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4df2311b0ce24f06ba253fda361f938dfecd7b961576f9be3f3fbd60e87130ac"}, - {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e4141c5a32b5e37905b5940aacbc59739f036930367d7acce7a64e4dec1f5e0b"}, - {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b1ce7f41670c5a69e1389420436f41385b1aa2504c3b0c30620764b15dded2e7"}, - {file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4147151acabb9caed4e474c3344181e91ff7a388b888f1e19ea04f7e73dc7ad5"}, - {file = "msgpack-1.1.1-cp313-cp313-win32.whl", hash = "sha256:500e85823a27d6d9bba1d057c871b4210c1dd6fb01fbb764e37e4e8847376323"}, - {file = "msgpack-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:6d489fba546295983abd142812bda76b57e33d0b9f5d5b71c09a583285506f69"}, - {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bba1be28247e68994355e028dcd668316db30c1f758d3241a7b903ac78dcd285"}, - {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8f93dcddb243159c9e4109c9750ba5b335ab8d48d9522c5308cd05d7e3ce600"}, - {file = "msgpack-1.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fbbc0b906a24038c9958a1ba7ae0918ad35b06cb449d398b76a7d08470b0ed9"}, - {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:61e35a55a546a1690d9d09effaa436c25ae6130573b6ee9829c37ef0f18d5e78"}, - {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:1abfc6e949b352dadf4bce0eb78023212ec5ac42f6abfd469ce91d783c149c2a"}, - {file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:996f2609ddf0142daba4cefd767d6db26958aac8439ee41db9cc0db9f4c4c3a6"}, - {file = "msgpack-1.1.1-cp38-cp38-win32.whl", hash = "sha256:4d3237b224b930d58e9d83c81c0dba7aacc20fcc2f89c1e5423aa0529a4cd142"}, - {file = "msgpack-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:da8f41e602574ece93dbbda1fab24650d6bf2a24089f9e9dbb4f5730ec1e58ad"}, - {file = "msgpack-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5be6b6bc52fad84d010cb45433720327ce886009d862f46b26d4d154001994b"}, - {file = "msgpack-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a89cd8c087ea67e64844287ea52888239cbd2940884eafd2dcd25754fb72232"}, - {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d75f3807a9900a7d575d8d6674a3a47e9f227e8716256f35bc6f03fc597ffbf"}, - {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d182dac0221eb8faef2e6f44701812b467c02674a322c739355c39e94730cdbf"}, - {file = "msgpack-1.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b13fe0fb4aac1aa5320cd693b297fe6fdef0e7bea5518cbc2dd5299f873ae90"}, - {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:435807eeb1bc791ceb3247d13c79868deb22184e1fc4224808750f0d7d1affc1"}, - {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4835d17af722609a45e16037bb1d4d78b7bdf19d6c0128116d178956618c4e88"}, - {file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8ef6e342c137888ebbfb233e02b8fbd689bb5b5fcc59b34711ac47ebd504478"}, - {file = "msgpack-1.1.1-cp39-cp39-win32.whl", hash = "sha256:61abccf9de335d9efd149e2fff97ed5974f2481b3353772e8e2dd3402ba2bd57"}, - {file = "msgpack-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:40eae974c873b2992fd36424a5d9407f93e97656d999f43fca9d29f820899084"}, - {file = "msgpack-1.1.1.tar.gz", hash = "sha256:77b79ce34a2bdab2594f490c8e80dd62a02d650b91a75159a63ec413b8d104cd"}, +files = [ + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, + {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, + {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, + {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, + {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, + {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, + {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, + {file = 
"msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, + {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, + {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, + {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, + {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, + {file = 
"msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, + {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, + {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, ] [[package]] name = "multidict" -version = "6.6.3" +version = "6.1.0" description = "multidict implementation" optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\"" -files = [ - {file = "multidict-6.6.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a2be5b7b35271f7fff1397204ba6708365e3d773579fe2a30625e16c4b4ce817"}, - {file = "multidict-6.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12f4581d2930840295c461764b9a65732ec01250b46c6b2c510d7ee68872b140"}, - {file = "multidict-6.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dd7793bab517e706c9ed9d7310b06c8672fd0aeee5781bfad612f56b8e0f7d14"}, - {file = "multidict-6.6.3-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:72d8815f2cd3cf3df0f83cac3f3ef801d908b2d90409ae28102e0553af85545a"}, - {file = "multidict-6.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:531e331a2ee53543ab32b16334e2deb26f4e6b9b28e41f8e0c87e99a6c8e2d69"}, - {file = "multidict-6.6.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:42ca5aa9329a63be8dc49040f63817d1ac980e02eeddba763a9ae5b4027b9c9c"}, - {file = "multidict-6.6.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:208b9b9757060b9faa6f11ab4bc52846e4f3c2fb8b14d5680c8aac80af3dc751"}, - {file = "multidict-6.6.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:acf6b97bd0884891af6a8b43d0f586ab2fcf8e717cbd47ab4bdddc09e20652d8"}, - {file = "multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:68e9e12ed00e2089725669bdc88602b0b6f8d23c0c95e52b95f0bc69f7fe9b55"}, - {file = "multidict-6.6.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:05db2f66c9addb10cfa226e1acb363450fab2ff8a6df73c622fefe2f5af6d4e7"}, - {file = "multidict-6.6.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:0db58da8eafb514db832a1b44f8fa7906fdd102f7d982025f816a93ba45e3dcb"}, - {file = "multidict-6.6.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:14117a41c8fdb3ee19c743b1c027da0736fdb79584d61a766da53d399b71176c"}, - {file = "multidict-6.6.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:877443eaaabcd0b74ff32ebeed6f6176c71850feb7d6a1d2db65945256ea535c"}, - {file = "multidict-6.6.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:70b72e749a4f6e7ed8fb334fa8d8496384840319512746a5f42fa0aec79f4d61"}, - {file = "multidict-6.6.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43571f785b86afd02b3855c5ac8e86ec921b760298d6f82ff2a61daf5a35330b"}, - {file = "multidict-6.6.3-cp310-cp310-win32.whl", hash = "sha256:20c5a0c3c13a15fd5ea86c42311859f970070e4e24de5a550e99d7c271d76318"}, - {file = "multidict-6.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab0a34a007704c625e25a9116c6770b4d3617a071c8a7c30cd338dfbadfe6485"}, - {file = "multidict-6.6.3-cp310-cp310-win_arm64.whl", hash = "sha256:769841d70ca8bdd140a715746199fc6473414bd02efd678d75681d2d6a8986c5"}, - 
{file = "multidict-6.6.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:18f4eba0cbac3546b8ae31e0bbc55b02c801ae3cbaf80c247fcdd89b456ff58c"}, - {file = "multidict-6.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef43b5dd842382329e4797c46f10748d8c2b6e0614f46b4afe4aee9ac33159df"}, - {file = "multidict-6.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf9bd1fd5eec01494e0f2e8e446a74a85d5e49afb63d75a9934e4a5423dba21d"}, - {file = "multidict-6.6.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:5bd8d6f793a787153956cd35e24f60485bf0651c238e207b9a54f7458b16d539"}, - {file = "multidict-6.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bf99b4daf908c73856bd87ee0a2499c3c9a3d19bb04b9c6025e66af3fd07462"}, - {file = "multidict-6.6.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b9e59946b49dafaf990fd9c17ceafa62976e8471a14952163d10a7a630413a9"}, - {file = "multidict-6.6.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e2db616467070d0533832d204c54eea6836a5e628f2cb1e6dfd8cd6ba7277cb7"}, - {file = "multidict-6.6.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7394888236621f61dcdd25189b2768ae5cc280f041029a5bcf1122ac63df79f9"}, - {file = "multidict-6.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f114d8478733ca7388e7c7e0ab34b72547476b97009d643644ac33d4d3fe1821"}, - {file = "multidict-6.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cdf22e4db76d323bcdc733514bf732e9fb349707c98d341d40ebcc6e9318ef3d"}, - {file = "multidict-6.6.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e995a34c3d44ab511bfc11aa26869b9d66c2d8c799fa0e74b28a473a692532d6"}, - {file = "multidict-6.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:766a4a5996f54361d8d5a9050140aa5362fe48ce51c755a50c0bc3706460c430"}, - {file = "multidict-6.6.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3893a0d7d28a7fe6ca7a1f760593bc13038d1d35daf52199d431b61d2660602b"}, - {file = "multidict-6.6.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:934796c81ea996e61914ba58064920d6cad5d99140ac3167901eb932150e2e56"}, - {file = "multidict-6.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9ed948328aec2072bc00f05d961ceadfd3e9bfc2966c1319aeaf7b7c21219183"}, - {file = "multidict-6.6.3-cp311-cp311-win32.whl", hash = "sha256:9f5b28c074c76afc3e4c610c488e3493976fe0e596dd3db6c8ddfbb0134dcac5"}, - {file = "multidict-6.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc7f6fbc61b1c16050a389c630da0b32fc6d4a3d191394ab78972bf5edc568c2"}, - {file = "multidict-6.6.3-cp311-cp311-win_arm64.whl", hash = "sha256:d4e47d8faffaae822fb5cba20937c048d4f734f43572e7079298a6c39fb172cb"}, - {file = "multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6"}, - {file = "multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f"}, - {file = "multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55"}, - {file = "multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = 
"sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b"}, - {file = "multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888"}, - {file = "multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d"}, - {file = "multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680"}, - {file = "multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a"}, - {file = "multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961"}, - {file = "multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65"}, - {file = "multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643"}, - {file = "multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063"}, - {file = "multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3"}, - {file = "multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75"}, - {file = "multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10"}, - {file = "multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5"}, - {file = "multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17"}, - {file = "multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b"}, - {file = "multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55"}, - {file = "multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b"}, - {file = "multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65"}, - {file = "multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3"}, - {file = "multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c"}, - {file = "multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6"}, - {file = "multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8"}, - {file = "multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca"}, - {file = "multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884"}, - {file = "multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7"}, - {file = "multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b"}, - {file = "multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c"}, - {file = "multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b"}, - {file = "multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1"}, - {file = "multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6"}, - {file = "multidict-6.6.3-cp313-cp313-win32.whl", hash = "sha256:5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e"}, - {file = "multidict-6.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9"}, - {file = "multidict-6.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600"}, - {file = "multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134"}, - {file = "multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37"}, - {file = "multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8"}, - {file = "multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1"}, - {file = "multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373"}, - {file = "multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e"}, - {file = "multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f"}, - {file = "multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0"}, - {file = "multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc"}, - {file = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f"}, - {file = 
"multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471"}, - {file = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2"}, - {file = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648"}, - {file = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d"}, - {file = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c"}, - {file = "multidict-6.6.3-cp313-cp313t-win32.whl", hash = "sha256:639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e"}, - {file = "multidict-6.6.3-cp313-cp313t-win_amd64.whl", hash = "sha256:9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d"}, - {file = "multidict-6.6.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb"}, - {file = "multidict-6.6.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c8161b5a7778d3137ea2ee7ae8a08cce0010de3b00ac671c5ebddeaa17cefd22"}, - {file = "multidict-6.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1328201ee930f069961ae707d59c6627ac92e351ed5b92397cf534d1336ce557"}, - {file = "multidict-6.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b1db4d2093d6b235de76932febf9d50766cf49a5692277b2c28a501c9637f616"}, - {file = "multidict-6.6.3-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53becb01dd8ebd19d1724bebe369cfa87e4e7f29abbbe5c14c98ce4c383e16cd"}, - {file = "multidict-6.6.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41bb9d1d4c303886e2d85bade86e59885112a7f4277af5ad47ab919a2251f306"}, - {file = "multidict-6.6.3-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:775b464d31dac90f23192af9c291dc9f423101857e33e9ebf0020a10bfcf4144"}, - {file = "multidict-6.6.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d04d01f0a913202205a598246cf77826fe3baa5a63e9f6ccf1ab0601cf56eca0"}, - {file = "multidict-6.6.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d25594d3b38a2e6cabfdcafef339f754ca6e81fbbdb6650ad773ea9775af35ab"}, - {file = "multidict-6.6.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:35712f1748d409e0707b165bf49f9f17f9e28ae85470c41615778f8d4f7d9609"}, - {file = "multidict-6.6.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1c8082e5814b662de8589d6a06c17e77940d5539080cbab9fe6794b5241b76d9"}, - {file = "multidict-6.6.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:61af8a4b771f1d4d000b3168c12c3120ccf7284502a94aa58c68a81f5afac090"}, - {file = "multidict-6.6.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:448e4a9afccbf297577f2eaa586f07067441e7b63c8362a3540ba5a38dc0f14a"}, - {file = "multidict-6.6.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:233ad16999afc2bbd3e534ad8dbe685ef8ee49a37dbc2cdc9514e57b6d589ced"}, - {file = "multidict-6.6.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:bb933c891cd4da6bdcc9733d048e994e22e1883287ff7540c2a0f3b117605092"}, - {file = "multidict-6.6.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:37b09ca60998e87734699e88c2363abfd457ed18cfbf88e4009a4e83788e63ed"}, - {file = "multidict-6.6.3-cp39-cp39-win32.whl", hash = "sha256:f54cb79d26d0cd420637d184af38f0668558f3c4bbe22ab7ad830e67249f2e0b"}, - {file = "multidict-6.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:295adc9c0551e5d5214b45cf29ca23dbc28c2d197a9c30d51aed9e037cb7c578"}, - {file = "multidict-6.6.3-cp39-cp39-win_arm64.whl", hash = "sha256:15332783596f227db50fb261c2c251a58ac3873c457f3a550a95d5c0aa3c770d"}, - {file = "multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a"}, - {file = "multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "mypy-boto3-dynamodb" -version = "1.40.0" -description = "Type annotations for boto3 DynamoDB 1.40.0 service generated with mypy-boto3-builder 8.11.0" -optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "mypy_boto3_dynamodb-1.40.0-py3-none-any.whl", hash = "sha256:b7b0c02e58d1c2323378a9c648c39c68bef867cf7da2721ea257e1c6aaa3d229"}, - {file = "mypy_boto3_dynamodb-1.40.0.tar.gz", hash = "sha256:97f65006a1706f7cbdf53ad1c3a9914e10b53754194db4ad12004eca7c376b4e"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = 
"sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = 
"multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = 
"sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = 
"multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] [package.dependencies] -typing-extensions = {version = "*", markers = "python_version < \"3.12\""} +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} [[package]] name = "mypy-boto3-glue" -version = "1.40.5" -description = "Type annotations for boto3 Glue 1.40.5 service generated with mypy-boto3-builder 8.11.0" -optional = false +version = "1.36.4" +description = "Type annotations for boto3 Glue 1.36.4 service generated with mypy-boto3-builder 8.8.0" +optional = true python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "mypy_boto3_glue-1.40.5-py3-none-any.whl", hash = "sha256:a86c478345545b5c55adf8eef2648c3663161a678fa7313f019795a725768e06"}, - {file = "mypy_boto3_glue-1.40.5.tar.gz", hash = "sha256:bb1de6748dae6685aa10dee9f32be658cf05df1f16cd05096892b58f87d8bd71"}, + {file = "mypy_boto3_glue-1.36.4-py3-none-any.whl", hash = "sha256:ae420af4301fbe84a6e38b244901cfa98c9162c646fb621d0f9f39a918e34cef"}, + {file = "mypy_boto3_glue-1.36.4.tar.gz", hash = "sha256:6f8630ccde28bcd346ca0fc60c33a394aa3a6a7c878dd0eb22e255cb464ed5f4"}, ] [package.dependencies] @@ -3385,7 +2902,6 @@ version = "8.4.0" description = "Simple yet flexible natural sorting in Python." 
optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c"}, {file = "natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581"}, @@ -3401,8 +2917,6 @@ version = "3.2.1" description = "Python package for creating and manipulating graphs and networks" optional = false python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version < \"3.10\"" files = [ {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, @@ -3415,141 +2929,23 @@ doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9. extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"] test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] -[[package]] -name = "networkx" -version = "3.4.2" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.10" -groups = ["dev"] -markers = "python_version == \"3.10\"" -files = [ - {file = "networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f"}, - {file = "networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1"}, -] - -[package.extras] -default = ["matplotlib (>=3.7)", "numpy (>=1.24)", "pandas (>=2.0)", "scipy (>=1.10,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.15)", "sphinx (>=7.3)", "sphinx-gallery (>=0.16)", "texext (>=0.6.7)"] -example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=1.9)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] -extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "networkx" -version = "3.5" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.11" -groups = ["dev"] -markers = "python_version >= \"3.11\"" -files = [ - {file = "networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"}, - {file = "networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"}, -] - -[package.extras] -default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] -developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] -doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] -example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] -extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] -test-extras = ["pytest-mpl", "pytest-randomly"] - [[package]] name = "nodeenv" version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = 
"!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[[package]] -name = "numba" -version = "0.60.0" -description = "compiling Python code using LLVM" -optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version < \"3.10\" and extra == \"bodo\"" -files = [ - {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, - {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, - {file = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}, - {file = "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}, - {file = "numba-0.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}, - {file = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}, - {file = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}, - {file = "numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}, - {file = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}, - {file = "numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}, - {file = "numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404"}, - {file = "numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c"}, - {file = "numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e"}, - {file = "numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d"}, - {file = "numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347"}, - {file = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}, - {file = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}, - {file = "numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}, - {file = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}, - {file = "numba-0.60.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}, - {file = "numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}, -] - -[package.dependencies] -llvmlite = "==0.43.*" -numpy = ">=1.22,<2.1" - -[[package]] -name = "numba" -version = "0.61.2" -description = "compiling Python code using LLVM" -optional = true -python-versions = ">=3.10" -groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"bodo\"" -files = [ - {file = "numba-0.61.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:cf9f9fc00d6eca0c23fc840817ce9f439b9f03c8f03d6246c0e7f0cb15b7162a"}, - {file = "numba-0.61.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ea0247617edcb5dd61f6106a56255baab031acc4257bddaeddb3a1003b4ca3fd"}, - {file = "numba-0.61.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ae8c7a522c26215d5f62ebec436e3d341f7f590079245a2f1008dfd498cc1642"}, - {file = "numba-0.61.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bd1e74609855aa43661edffca37346e4e8462f6903889917e9f41db40907daa2"}, - {file = "numba-0.61.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae45830b129c6137294093b269ef0a22998ccc27bf7cf096ab8dcf7bca8946f9"}, - {file = "numba-0.61.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:efd3db391df53aaa5cfbee189b6c910a5b471488749fd6606c3f33fc984c2ae2"}, - {file = "numba-0.61.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49c980e4171948ffebf6b9a2520ea81feed113c1f4890747ba7f59e74be84b1b"}, - {file = "numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60"}, - {file = "numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18"}, - {file = "numba-0.61.2-cp311-cp311-win_amd64.whl", hash = "sha256:76bcec9f46259cedf888041b9886e257ae101c6268261b19fda8cfbc52bec9d1"}, - {file = "numba-0.61.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:34fba9406078bac7ab052efbf0d13939426c753ad72946baaa5bf9ae0ebb8dd2"}, - {file = "numba-0.61.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ddce10009bc097b080fc96876d14c051cc0c7679e99de3e0af59014dab7dfe8"}, - {file = "numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546"}, - {file = "numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd"}, - {file = "numba-0.61.2-cp312-cp312-win_amd64.whl", hash = "sha256:97cf4f12c728cf77c9c1d7c23707e4d8fb4632b46275f8f3397de33e5877af18"}, - {file = "numba-0.61.2-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:3a10a8fc9afac40b1eac55717cece1b8b1ac0b946f5065c89e00bde646b5b154"}, - {file = "numba-0.61.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d3bcada3c9afba3bed413fba45845f2fb9cd0d2b27dd58a1be90257e293d140"}, - {file = "numba-0.61.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bdbca73ad81fa196bd53dc12e3aaf1564ae036e0c125f237c7644fe64a4928ab"}, - {file = "numba-0.61.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:5f154aaea625fb32cfbe3b80c5456d514d416fcdf79733dd69c0df3a11348e9e"}, - {file = "numba-0.61.2-cp313-cp313-win_amd64.whl", hash = "sha256:59321215e2e0ac5fa928a8020ab00b8e57cda8a97384963ac0dfa4d4e6aa54e7"}, - {file = "numba-0.61.2.tar.gz", hash = 
"sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d"}, -] - -[package.dependencies] -llvmlite = "==0.44.*" -numpy = ">=1.24,<2.3" - [[package]] name = "numpy" version = "2.0.2" description = "Fundamental package for array computing in Python" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "python_version < \"3.10\" and (extra == \"bodo\" or extra == \"pandas\" or extra == \"ray\")" files = [ {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, @@ -3600,81 +2996,77 @@ files = [ [[package]] name = "numpy" -version = "2.2.6" +version = "2.2.2" description = "Fundamental package for array computing in Python" optional = true python-versions = ">=3.10" -groups = ["main"] -markers = "python_version >= \"3.10\" and (extra == \"bodo\" or extra == \"pandas\" or extra == \"ray\")" -files = [ - {file = "numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb"}, - {file = "numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90"}, - {file = "numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163"}, - {file = "numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf"}, - {file = "numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83"}, - {file = "numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915"}, - {file = "numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680"}, - {file = "numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289"}, - {file = "numpy-2.2.6-cp310-cp310-win32.whl", hash = "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d"}, - {file = "numpy-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3"}, - {file = "numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae"}, - {file = "numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a"}, - {file = "numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42"}, - {file = "numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491"}, - {file = "numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a"}, - {file = "numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf"}, - {file = "numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1"}, - {file = "numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab"}, - {file = "numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47"}, - {file = "numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303"}, - {file = "numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff"}, - {file = "numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c"}, - {file = "numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3"}, - {file = "numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282"}, - {file = "numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87"}, - {file = "numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249"}, - {file = "numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49"}, - {file = "numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de"}, - {file = "numpy-2.2.6-cp312-cp312-win32.whl", hash = "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4"}, - {file = "numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2"}, - {file = "numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84"}, - {file = "numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b"}, - {file = "numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d"}, - {file = "numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566"}, - {file = "numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f"}, - {file = "numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f"}, - {file = "numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868"}, - {file = "numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d"}, - {file = "numpy-2.2.6-cp313-cp313-win32.whl", hash = "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd"}, - {file = "numpy-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c"}, - {file = "numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6"}, 
- {file = "numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda"}, - {file = "numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40"}, - {file = "numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8"}, - {file = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f"}, - {file = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa"}, - {file = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571"}, - {file = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1"}, - {file = "numpy-2.2.6-cp313-cp313t-win32.whl", hash = "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff"}, - {file = "numpy-2.2.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06"}, - {file = "numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d"}, - {file = "numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db"}, - {file = "numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543"}, - {file = "numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00"}, - {file = "numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd"}, +files = [ + {file = "numpy-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7079129b64cb78bdc8d611d1fd7e8002c0a2565da6a47c4df8062349fee90e3e"}, + {file = "numpy-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec6c689c61df613b783aeb21f945c4cbe6c51c28cb70aae8430577ab39f163e"}, + {file = "numpy-2.2.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:40c7ff5da22cd391944a28c6a9c638a5eef77fcf71d6e3a79e1d9d9e82752715"}, + {file = "numpy-2.2.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:995f9e8181723852ca458e22de5d9b7d3ba4da3f11cc1cb113f093b271d7965a"}, + {file = "numpy-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b78ea78450fd96a498f50ee096f69c75379af5138f7881a51355ab0e11286c97"}, + {file = "numpy-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fbe72d347fbc59f94124125e73fc4976a06927ebc503ec5afbfb35f193cd957"}, + {file = "numpy-2.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8e6da5cffbbe571f93588f562ed130ea63ee206d12851b60819512dd3e1ba50d"}, + {file = "numpy-2.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:09d6a2032faf25e8d0cadde7fd6145118ac55d2740132c1d845f98721b5ebcfd"}, + {file = "numpy-2.2.2-cp310-cp310-win32.whl", hash = "sha256:159ff6ee4c4a36a23fe01b7c3d07bd8c14cc433d9720f977fcd52c13c0098160"}, + {file = "numpy-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:64bd6e1762cd7f0986a740fee4dff927b9ec2c5e4d9a28d056eb17d332158014"}, + {file = 
"numpy-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:642199e98af1bd2b6aeb8ecf726972d238c9877b0f6e8221ee5ab945ec8a2189"}, + {file = "numpy-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6d9fc9d812c81e6168b6d405bf00b8d6739a7f72ef22a9214c4241e0dc70b323"}, + {file = "numpy-2.2.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c7d1fd447e33ee20c1f33f2c8e6634211124a9aabde3c617687d8b739aa69eac"}, + {file = "numpy-2.2.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:451e854cfae0febe723077bd0cf0a4302a5d84ff25f0bfece8f29206c7bed02e"}, + {file = "numpy-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd249bc894af67cbd8bad2c22e7cbcd46cf87ddfca1f1289d1e7e54868cc785c"}, + {file = "numpy-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02935e2c3c0c6cbe9c7955a8efa8908dd4221d7755644c59d1bba28b94fd334f"}, + {file = "numpy-2.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a972cec723e0563aa0823ee2ab1df0cb196ed0778f173b381c871a03719d4826"}, + {file = "numpy-2.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d6d6a0910c3b4368d89dde073e630882cdb266755565155bc33520283b2d9df8"}, + {file = "numpy-2.2.2-cp311-cp311-win32.whl", hash = "sha256:860fd59990c37c3ef913c3ae390b3929d005243acca1a86facb0773e2d8d9e50"}, + {file = "numpy-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:da1eeb460ecce8d5b8608826595c777728cdf28ce7b5a5a8c8ac8d949beadcf2"}, + {file = "numpy-2.2.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ac9bea18d6d58a995fac1b2cb4488e17eceeac413af014b1dd26170b766d8467"}, + {file = "numpy-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23ae9f0c2d889b7b2d88a3791f6c09e2ef827c2446f1c4a3e3e76328ee4afd9a"}, + {file = "numpy-2.2.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:3074634ea4d6df66be04f6728ee1d173cfded75d002c75fac79503a880bf3825"}, + {file = "numpy-2.2.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ec0636d3f7d68520afc6ac2dc4b8341ddb725039de042faf0e311599f54eb37"}, + {file = "numpy-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ffbb1acd69fdf8e89dd60ef6182ca90a743620957afb7066385a7bbe88dc748"}, + {file = "numpy-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0349b025e15ea9d05c3d63f9657707a4e1d471128a3b1d876c095f328f8ff7f0"}, + {file = "numpy-2.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:463247edcee4a5537841d5350bc87fe8e92d7dd0e8c71c995d2c6eecb8208278"}, + {file = "numpy-2.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9dd47ff0cb2a656ad69c38da850df3454da88ee9a6fde0ba79acceee0e79daba"}, + {file = "numpy-2.2.2-cp312-cp312-win32.whl", hash = "sha256:4525b88c11906d5ab1b0ec1f290996c0020dd318af8b49acaa46f198b1ffc283"}, + {file = "numpy-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:5acea83b801e98541619af398cc0109ff48016955cc0818f478ee9ef1c5c3dcb"}, + {file = "numpy-2.2.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b208cfd4f5fe34e1535c08983a1a6803fdbc7a1e86cf13dd0c61de0b51a0aadc"}, + {file = "numpy-2.2.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d0bbe7dd86dca64854f4b6ce2ea5c60b51e36dfd597300057cf473d3615f2369"}, + {file = "numpy-2.2.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:22ea3bb552ade325530e72a0c557cdf2dea8914d3a5e1fecf58fa5dbcc6f43cd"}, + {file = "numpy-2.2.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:128c41c085cab8a85dc29e66ed88c05613dccf6bc28b3866cd16050a2f5448be"}, + {file = 
"numpy-2.2.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:250c16b277e3b809ac20d1f590716597481061b514223c7badb7a0f9993c7f84"}, + {file = "numpy-2.2.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0c8854b09bc4de7b041148d8550d3bd712b5c21ff6a8ed308085f190235d7ff"}, + {file = "numpy-2.2.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b6fb9c32a91ec32a689ec6410def76443e3c750e7cfc3fb2206b985ffb2b85f0"}, + {file = "numpy-2.2.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:57b4012e04cc12b78590a334907e01b3a85efb2107df2b8733ff1ed05fce71de"}, + {file = "numpy-2.2.2-cp313-cp313-win32.whl", hash = "sha256:4dbd80e453bd34bd003b16bd802fac70ad76bd463f81f0c518d1245b1c55e3d9"}, + {file = "numpy-2.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:5a8c863ceacae696aff37d1fd636121f1a512117652e5dfb86031c8d84836369"}, + {file = "numpy-2.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b3482cb7b3325faa5f6bc179649406058253d91ceda359c104dac0ad320e1391"}, + {file = "numpy-2.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9491100aba630910489c1d0158034e1c9a6546f0b1340f716d522dc103788e39"}, + {file = "numpy-2.2.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:41184c416143defa34cc8eb9d070b0a5ba4f13a0fa96a709e20584638254b317"}, + {file = "numpy-2.2.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:7dca87ca328f5ea7dafc907c5ec100d187911f94825f8700caac0b3f4c384b49"}, + {file = "numpy-2.2.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bc61b307655d1a7f9f4b043628b9f2b721e80839914ede634e3d485913e1fb2"}, + {file = "numpy-2.2.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fad446ad0bc886855ddf5909cbf8cb5d0faa637aaa6277fb4b19ade134ab3c7"}, + {file = "numpy-2.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:149d1113ac15005652e8d0d3f6fd599360e1a708a4f98e43c9c77834a28238cb"}, + {file = "numpy-2.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:106397dbbb1896f99e044efc90360d098b3335060375c26aa89c0d8a97c5f648"}, + {file = "numpy-2.2.2-cp313-cp313t-win32.whl", hash = "sha256:0eec19f8af947a61e968d5429f0bd92fec46d92b0008d0a6685b40d6adf8a4f4"}, + {file = "numpy-2.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:97b974d3ba0fb4612b77ed35d7627490e8e3dff56ab41454d9e8b23448940576"}, + {file = "numpy-2.2.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b0531f0b0e07643eb089df4c509d30d72c9ef40defa53e41363eca8a8cc61495"}, + {file = "numpy-2.2.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:e9e82dcb3f2ebbc8cb5ce1102d5f1c5ed236bf8a11730fb45ba82e2841ec21df"}, + {file = "numpy-2.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d4142eb40ca6f94539e4db929410f2a46052a0fe7a2c1c59f6179c39938d2a"}, + {file = "numpy-2.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:356ca982c188acbfa6af0d694284d8cf20e95b1c3d0aefa8929376fea9146f60"}, + {file = "numpy-2.2.2.tar.gz", hash = "sha256:ed6906f61834d687738d25988ae117683705636936cc605be0bb208b23df4d8f"}, ] [[package]] name = "oauthlib" -version = "3.3.1" +version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"gcsfs\"" +python-versions = ">=3.6" files = [ - {file = "oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1"}, - {file = "oauthlib-3.3.1.tar.gz", hash 
= "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9"}, + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, ] [package.extras] @@ -3688,7 +3080,6 @@ version = "0.6.3" description = "OpenAPI schema validation for Python" optional = false python-versions = "<4.0.0,>=3.8.0" -groups = ["dev"] files = [ {file = "openapi_schema_validator-0.6.3-py3-none-any.whl", hash = "sha256:f3b9870f4e556b5a62a1c39da72a6b4b16f3ad9c73dc80084b1b11e74ba148a3"}, {file = "openapi_schema_validator-0.6.3.tar.gz", hash = "sha256:f37bace4fc2a5d96692f4f8b31dc0f8d7400fd04f3a937798eaf880d425de6ee"}, @@ -3701,14 +3092,13 @@ rfc3339-validator = "*" [[package]] name = "openapi-spec-validator" -version = "0.7.2" +version = "0.7.1" description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" optional = false -python-versions = "<4.0.0,>=3.8.0" -groups = ["dev"] +python-versions = ">=3.8.0,<4.0.0" files = [ - {file = "openapi_spec_validator-0.7.2-py3-none-any.whl", hash = "sha256:4bbdc0894ec85f1d1bea1d6d9c8b2c3c8d7ccaa13577ef40da9c006c9fd0eb60"}, - {file = "openapi_spec_validator-0.7.2.tar.gz", hash = "sha256:cc029309b5c5dbc7859df0372d55e9d1ff43e96d678b9ba087f7c56fc586f734"}, + {file = "openapi_spec_validator-0.7.1-py3-none-any.whl", hash = "sha256:3c81825043f24ccbcd2f4b149b11e8231abce5ba84f37065e14ec947d8f4e959"}, + {file = "openapi_spec_validator-0.7.1.tar.gz", hash = "sha256:8577b85a8268685da6f8aa30990b83b7960d4d1117e901d451b5d572605e5ec7"}, ] [package.dependencies] @@ -3719,16 +3109,14 @@ openapi-schema-validator = ">=0.6.0,<0.7.0" [[package]] name = "packaging" -version = "25.0" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["main", "dev", "docs"] files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] -markers = {main = "extra == \"ray\" or extra == \"hf\""} [[package]] name = "paginate" @@ -3736,7 +3124,6 @@ version = "0.5.7" description = "Divides large result sets into pages for easier browsing" optional = false python-versions = "*" -groups = ["docs"] files = [ {file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"}, {file = "paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"}, @@ -3748,55 +3135,53 @@ lint = ["black"] [[package]] name = "pandas" -version = "2.3.1" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"bodo\" or extra == \"pandas\" or extra == \"ray\"" -files = [ - {file = "pandas-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22c2e866f7209ebc3a8f08d75766566aae02bcc91d196935a1d9e59c7b990ac9"}, - {file = "pandas-2.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:3583d348546201aff730c8c47e49bc159833f971c2899d6097bce68b9112a4f1"}, - {file = "pandas-2.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f951fbb702dacd390561e0ea45cdd8ecfa7fb56935eb3dd78e306c19104b9b0"}, - {file = "pandas-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd05b72ec02ebfb993569b4931b2e16fbb4d6ad6ce80224a3ee838387d83a191"}, - {file = "pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1b916a627919a247d865aed068eb65eb91a344b13f5b57ab9f610b7716c92de1"}, - {file = "pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fe67dc676818c186d5a3d5425250e40f179c2a89145df477dd82945eaea89e97"}, - {file = "pandas-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:2eb789ae0274672acbd3c575b0598d213345660120a257b47b5dafdc618aec83"}, - {file = "pandas-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2b0540963d83431f5ce8870ea02a7430adca100cec8a050f0811f8e31035541b"}, - {file = "pandas-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fe7317f578c6a153912bd2292f02e40c1d8f253e93c599e82620c7f69755c74f"}, - {file = "pandas-2.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6723a27ad7b244c0c79d8e7007092d7c8f0f11305770e2f4cd778b3ad5f9f85"}, - {file = "pandas-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3462c3735fe19f2638f2c3a40bd94ec2dc5ba13abbb032dd2fa1f540a075509d"}, - {file = "pandas-2.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:98bcc8b5bf7afed22cc753a28bc4d9e26e078e777066bc53fac7904ddef9a678"}, - {file = "pandas-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d544806b485ddf29e52d75b1f559142514e60ef58a832f74fb38e48d757b299"}, - {file = "pandas-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b3cd4273d3cb3707b6fffd217204c52ed92859533e31dc03b7c5008aa933aaab"}, - {file = "pandas-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:689968e841136f9e542020698ee1c4fbe9caa2ed2213ae2388dc7b81721510d3"}, - {file = "pandas-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:025e92411c16cbe5bb2a4abc99732a6b132f439b8aab23a59fa593eb00704232"}, - {file = "pandas-2.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b7ff55f31c4fcb3e316e8f7fa194566b286d6ac430afec0d461163312c5841e"}, - {file = "pandas-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dcb79bf373a47d2a40cf7232928eb7540155abbc460925c2c96d2d30b006eb4"}, - {file = "pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:56a342b231e8862c96bdb6ab97170e203ce511f4d0429589c8ede1ee8ece48b8"}, - {file = "pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ca7ed14832bce68baef331f4d7f294411bed8efd032f8109d690df45e00c4679"}, - {file = "pandas-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:ac942bfd0aca577bef61f2bc8da8147c4ef6879965ef883d8e8d5d2dc3e744b8"}, - {file = "pandas-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9026bd4a80108fac2239294a15ef9003c4ee191a0f64b90f170b40cfb7cf2d22"}, - {file = "pandas-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6de8547d4fdb12421e2d047a2c446c623ff4c11f47fddb6b9169eb98ffba485a"}, - {file = "pandas-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782647ddc63c83133b2506912cc6b108140a38a37292102aaa19c81c83db2928"}, - {file = "pandas-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2ba6aff74075311fc88504b1db890187a3cd0f887a5b10f5525f8e2ef55bfdb9"}, - {file = "pandas-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e5635178b387bd2ba4ac040f82bc2ef6e6b500483975c4ebacd34bec945fda12"}, - {file = "pandas-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f3bf5ec947526106399a9e1d26d40ee2b259c66422efdf4de63c848492d91bb"}, - {file = "pandas-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:1c78cf43c8fde236342a1cb2c34bcff89564a7bfed7e474ed2fffa6aed03a956"}, - {file = "pandas-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8dfc17328e8da77be3cf9f47509e5637ba8f137148ed0e9b5241e1baf526e20a"}, - {file = "pandas-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ec6c851509364c59a5344458ab935e6451b31b818be467eb24b0fe89bd05b6b9"}, - {file = "pandas-2.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:911580460fc4884d9b05254b38a6bfadddfcc6aaef856fb5859e7ca202e45275"}, - {file = "pandas-2.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f4d6feeba91744872a600e6edbbd5b033005b431d5ae8379abee5bcfa479fab"}, - {file = "pandas-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fe37e757f462d31a9cd7580236a82f353f5713a80e059a29753cf938c6775d96"}, - {file = "pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5db9637dbc24b631ff3707269ae4559bce4b7fd75c1c4d7e13f40edc42df4444"}, - {file = "pandas-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4645f770f98d656f11c69e81aeb21c6fca076a44bed3dcbb9396a4311bc7f6d8"}, - {file = "pandas-2.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:342e59589cc454aaff7484d75b816a433350b3d7964d7847327edda4d532a2e3"}, - {file = "pandas-2.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d12f618d80379fde6af007f65f0c25bd3e40251dbd1636480dfffce2cf1e6da"}, - {file = "pandas-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd71c47a911da120d72ef173aeac0bf5241423f9bfea57320110a978457e069e"}, - {file = "pandas-2.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09e3b1587f0f3b0913e21e8b32c3119174551deb4a4eba4a89bc7377947977e7"}, - {file = "pandas-2.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2323294c73ed50f612f67e2bf3ae45aea04dce5690778e08a09391897f35ff88"}, - {file = "pandas-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:b4b0de34dc8499c2db34000ef8baad684cfa4cbd836ecee05f323ebfba348c7d"}, - {file = "pandas-2.3.1.tar.gz", hash = "sha256:0a95b9ac964fe83ce317827f80304d37388ea77616b1425f0ae41c9d2d0d7bb2"}, +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = 
"pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] @@ -3840,7 +3225,6 @@ version = "0.4.4" description = "Object-oriented paths" optional = false python-versions = "<4.0.0,>=3.7.0" -groups = ["dev"] files = [ {file = "pathable-0.4.4-py3-none-any.whl", hash = "sha256:5ae9e94793b6ef5a4cbe0a7ce9dbbefc1eec38df253763fd0aeeacf2762dbbc2"}, {file = "pathable-0.4.4.tar.gz", hash = "sha256:6905a3cd17804edfac7875b5f6c9142a218c7caef78693c2dbbbfbac186d88b2"}, @@ -3852,7 +3236,6 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" -groups = ["docs"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -3860,36 +3243,34 @@ files = [ [[package]] name = "platformdirs" -version = "4.3.8" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, - {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.14.1)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" -version = "1.6.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.9" -groups = ["dev"] +python-versions = ">=3.8" files = [ - {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, - {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["coverage", "pytest", "pytest-benchmark"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "ply" @@ -3897,7 +3278,6 @@ version = "3.11" description = "Python Lex & Yacc" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, @@ -3905,20 +3285,18 @@ files = [ [[package]] name = "polars" -version = "1.32.2" +version = "1.22.0" description = "Blazingly fast DataFrame library" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"polars\"" files = [ - {file = "polars-1.32.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f21da6a5210898ec800b7e9e667fb53eb9161b7ceb812ee6555ff5661a00e517"}, - {file = "polars-1.32.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:d3f4e061312ef6c2a907378ce407a6132734fe1a13f261a1984a1a9ca2f6febc"}, - {file = "polars-1.32.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a711a750cfc19f1f883d2b46895dd698abf4d446ca41c3bf510ced0ff1178057"}, - {file = "polars-1.32.2-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:d1c53a828eedc215fb0dabc7cef02c6f4ad042157512ddb99840fd42b8da1e8a"}, - {file = "polars-1.32.2-cp39-abi3-win_amd64.whl", hash = "sha256:5e1660a584e89e1d60cd89984feca38a695e491a966581fefe8be99c230ea154"}, - {file = "polars-1.32.2-cp39-abi3-win_arm64.whl", hash = "sha256:cd390364f6f3927474bd0aed255103195b9d2b3eef0f0c5bb429db5e6311615e"}, - {file = "polars-1.32.2.tar.gz", hash = 
"sha256:b4c5cefc7cf7a2461f8800cf2c09976c47cb1fd959c6ef3024d5618b497f05d3"}, + {file = "polars-1.22.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6250f838b916fab23ccafe90928d7952afc328d316c956b42d152b20c86ffd9c"}, + {file = "polars-1.22.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:5ee3cf3783205709ce31f070f2b4ee4296fec08f2c744a9c37acc7d360121022"}, + {file = "polars-1.22.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f25b4ef131da046d05b8235c5f29997630ee2125ebc0553b92258e88f7a8fa"}, + {file = "polars-1.22.0-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:729e6be8a884812a206518195a2fb407b61962323886095ede1a2a934cdb1410"}, + {file = "polars-1.22.0-cp39-abi3-win_amd64.whl", hash = "sha256:78b8bcd1735e9376815d117aeae49391441b2199b5a70a300669d692b34ec713"}, + {file = "polars-1.22.0-cp39-abi3-win_arm64.whl", hash = "sha256:cde8f56c408151ab9790c43485b90f690d5c198ce26ab38a845045c73c999325"}, + {file = "polars-1.22.0.tar.gz", hash = "sha256:8d94ae25085d92de10d93ab6a06c94f8c911bd5d9c1ff17cd1073a9dca766029"}, ] [package.extras] @@ -3929,35 +3307,52 @@ calamine = ["fastexcel (>=0.9)"] cloudpickle = ["cloudpickle"] connectorx = ["connectorx (>=0.3.2)"] database = ["polars[adbc,connectorx,sqlalchemy]"] -deltalake = ["deltalake (>=1.0.0)"] +deltalake = ["deltalake (>=0.19.0)"] excel = ["polars[calamine,openpyxl,xlsx2csv,xlsxwriter]"] fsspec = ["fsspec"] gpu = ["cudf-polars-cu12"] graph = ["matplotlib"] -iceberg = ["pyiceberg (>=0.7.1)"] +iceberg = ["pyiceberg (>=0.5.0)"] numpy = ["numpy (>=1.16.0)"] openpyxl = ["openpyxl (>=3.0.0)"] pandas = ["pandas", "polars[pyarrow]"] plot = ["altair (>=5.4.0)"] -polars-cloud = ["polars-cloud (>=0.0.1a1)"] pyarrow = ["pyarrow (>=7.0.0)"] pydantic = ["pydantic"] sqlalchemy = ["polars[pandas]", "sqlalchemy"] style = ["great-tables (>=0.8.0)"] -timezone = ["tzdata ; platform_system == \"Windows\""] +timezone = ["tzdata"] xlsx2csv = ["xlsx2csv (>=0.8.0)"] xlsxwriter = ["xlsxwriter"] +[[package]] +name = "portalocker" +version = "2.10.1" +description = "Wraps the portalocker recipe for easy usage" +optional = true +python-versions = ">=3.8" +files = [ + {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, + {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, +] + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] + [[package]] name = "pre-commit" -version = "4.3.0" +version = "4.1.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ - {file = "pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8"}, - {file = "pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16"}, + {file = "pre_commit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d29e7cb346295bcc1cc75fc3e92e343495e3ea0196c9ec6ba53f49f10ab6ae7b"}, + {file = "pre_commit-4.1.0.tar.gz", hash = "sha256:ae3f018575a588e30dfddfab9a05448bfbd6b73d78709617b5a2b853549716d4"}, ] [package.dependencies] @@ -3969,185 +3364,138 @@ virtualenv = ">=20.10.0" [[package]] name = "propcache" -version = "0.3.2" +version = "0.2.1" description = "Accelerated property cache" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\"" -files = [ - {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770"}, - {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3"}, - {file = "propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3"}, - {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e"}, - {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220"}, - {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb"}, - {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614"}, - {file = "propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50"}, - {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339"}, - {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0"}, - {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2"}, - {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7"}, - {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b"}, - {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c"}, - {file = "propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70"}, - {file = "propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9"}, - {file = "propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be"}, - {file = "propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f"}, - {file = "propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9"}, - {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf"}, - {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9"}, - {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66"}, - {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df"}, - {file = "propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2"}, - {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7"}, - {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95"}, - {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e"}, - {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e"}, - {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf"}, - {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e"}, - {file = "propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897"}, - {file = "propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39"}, - {file = "propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10"}, - {file = "propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154"}, - {file = "propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615"}, - {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db"}, - {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1"}, - {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c"}, - {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67"}, 
- {file = "propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b"}, - {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8"}, - {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251"}, - {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474"}, - {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535"}, - {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06"}, - {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1"}, - {file = "propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1"}, - {file = "propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c"}, - {file = "propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945"}, - {file = "propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252"}, - {file = "propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f"}, - {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33"}, - {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e"}, - {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1"}, - {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3"}, - {file = "propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1"}, - {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6"}, - {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387"}, - {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4"}, - {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88"}, - {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206"}, - {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43"}, - {file = "propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02"}, - {file = "propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05"}, - {file = "propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b"}, - {file = "propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0"}, - {file = "propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e"}, - {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28"}, - {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a"}, - {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c"}, - {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725"}, - {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892"}, - {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44"}, - {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe"}, - {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81"}, - {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba"}, - {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770"}, - {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330"}, - {file = "propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394"}, - {file = "propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198"}, - {file = "propcache-0.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5"}, - {file = "propcache-0.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4"}, - {file = "propcache-0.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2"}, - {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d"}, - {file = 
"propcache-0.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec"}, - {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701"}, - {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef"}, - {file = "propcache-0.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1"}, - {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886"}, - {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b"}, - {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb"}, - {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea"}, - {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb"}, - {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe"}, - {file = "propcache-0.3.2-cp39-cp39-win32.whl", hash = "sha256:4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1"}, - {file = "propcache-0.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9"}, - {file = "propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f"}, - {file = "propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168"}, +files = [ + {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b"}, + {file = "propcache-0.2.1-cp310-cp310-win32.whl", hash = "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4"}, + {file = "propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e"}, + {file = "propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034"}, + {file = "propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518"}, + {file = "propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246"}, + {file = "propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b"}, + {file = 
"propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30"}, + {file = "propcache-0.2.1-cp313-cp313-win32.whl", hash = "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6"}, + {file = "propcache-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587"}, + {file = "propcache-0.2.1-cp39-cp39-win32.whl", hash = "sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb"}, + {file = "propcache-0.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1"}, + {file = "propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54"}, + {file = "propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64"}, ] [[package]] name = "proto-plus" -version = "1.26.1" +version = "1.26.0" description = "Beautiful, Pythonic protocol buffers" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"gcsfs\"" files = [ - {file = "proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66"}, - {file = "proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012"}, + {file = "proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7"}, + {file = "proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22"}, ] [package.dependencies] -protobuf = ">=3.19.0,<7.0.0" +protobuf = ">=3.19.0,<6.0.0dev" [package.extras] testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "6.31.1" +version = "5.29.3" description = "" optional = true -python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"ray\" or extra == \"gcsfs\"" -files = [ - {file = "protobuf-6.31.1-cp310-abi3-win32.whl", hash = "sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9"}, - {file = "protobuf-6.31.1-cp310-abi3-win_amd64.whl", hash = "sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447"}, - {file = "protobuf-6.31.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402"}, - {file = "protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39"}, - {file = "protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6"}, - {file = "protobuf-6.31.1-cp39-cp39-win32.whl", hash = "sha256:0414e3aa5a5f3ff423828e1e6a6e907d6c65c1d5b7e6e975793d5590bdeecc16"}, - {file = "protobuf-6.31.1-cp39-cp39-win_amd64.whl", hash = "sha256:8764cf4587791e7564051b35524b72844f845ad0bb011704c3736cce762d8fe9"}, - {file = "protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e"}, - {file = "protobuf-6.31.1.tar.gz", hash = "sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a"}, -] - -[[package]] -name = "psutil" -version = "7.0.0" -description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
-optional = true -python-versions = ">=3.6" -groups = ["main"] -markers = "extra == \"bodo\"" +python-versions = ">=3.8" files = [ - {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, - {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, - {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, - {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, - {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, - {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, - {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, + {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"}, + {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"}, + {file = "protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e"}, + {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84"}, + {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f"}, + {file = "protobuf-5.29.3-cp38-cp38-win32.whl", hash = "sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252"}, + {file = "protobuf-5.29.3-cp38-cp38-win_amd64.whl", hash = "sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107"}, + {file = "protobuf-5.29.3-cp39-cp39-win32.whl", hash = "sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7"}, + {file = "protobuf-5.29.3-cp39-cp39-win_amd64.whl", hash = "sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da"}, + {file = "protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f"}, + {file = "protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620"}, ] -[package.extras] -dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] -test = ["pytest", "pytest-xdist", "setuptools"] - [[package]] name = "psycopg2-binary" version = "2.9.10" 
description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"sql-postgres\"" files = [ {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, @@ -4225,8 +3573,6 @@ version = "0.6.2" description = "Pure Python client SASL implementation" optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"hive-kerberos\"" files = [ {file = "pure-sasl-0.6.2.tar.gz", hash = "sha256:53c1355f5da95e2b85b2cc9a6af435518edc20c81193faa0eea65fdc835138f4"}, {file = "pure_sasl-0.6.2-py2-none-any.whl", hash = "sha256:edb33b1a46eb3c602c0166de0442c0fb41f5ac2bfccbde4775183b105ad89ab2"}, @@ -4241,7 +3587,6 @@ version = "0.6.1" description = "Pure Python PartiQL Parser" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "py_partiql_parser-0.6.1-py2.py3-none-any.whl", hash = "sha256:ff6a48067bff23c37e9044021bf1d949c83e195490c17e020715e927fe5b2456"}, {file = "py_partiql_parser-0.6.1.tar.gz", hash = "sha256:8583ff2a0e15560ef3bc3df109a7714d17f87d81d33e8c38b7fed4e58a63215d"}, @@ -4256,7 +3601,6 @@ version = "0.10.9.7" description = "Enables Python programs to dynamically access arbitrary Java objects" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "py4j-0.10.9.7-py2.py3-none-any.whl", hash = "sha256:85defdfd2b2376eb3abf5ca6474b51ab7e0de341c75a02f46dc9b5976f5a5c1b"}, {file = "py4j-0.10.9.7.tar.gz", hash = "sha256:0b6e5315bb3ada5cf62ac651d107bb2ebc02def3dee9d9548e3baac644ea8dbb"}, @@ -4266,10 +3610,8 @@ files = [ name = "pyarrow" version = "19.0.1" description = "Python library for Apache Arrow" -optional = true +optional = false python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"bodo\" or extra == \"daft\" or extra == \"datafusion\" or extra == \"duckdb\" or extra == \"pandas\" or extra == \"pyarrow\" or extra == \"ray\"" files = [ {file = "pyarrow-19.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:fc28912a2dc924dddc2087679cc8b7263accc71b9ff025a1362b004711661a69"}, {file = "pyarrow-19.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fca15aabbe9b8355800d923cc2e82c8ef514af321e18b437c3d782aa884eaeec"}, @@ -4324,8 +3666,6 @@ version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"gcsfs\" or extra == \"gcp-auth\"" files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -4333,19 +3673,17 @@ files = [ [[package]] name = "pyasn1-modules" -version = "0.4.2" +version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = true python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"gcsfs\" or extra == \"gcp-auth\"" files = [ - {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, - {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = 
"sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] [package.dependencies] -pyasn1 = ">=0.6.1,<0.7.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycparser" @@ -4353,142 +3691,138 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -markers = {main = "(extra == \"zstandard\" or extra == \"adlfs\") and platform_python_implementation == \"PyPy\" or extra == \"adlfs\"", dev = "platform_python_implementation != \"PyPy\""} [[package]] name = "pydantic" -version = "2.11.7" +version = "2.10.6" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.9" -groups = ["main", "dev"] +python-versions = ">=3.8" files = [ - {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, - {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.33.2" +pydantic-core = "2.27.2" typing-extensions = ">=4.12.2" -typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.9" -groups = ["main", "dev"] -files = [ - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, - 
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, - {file = 
"pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, - 
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, - {file = 
"pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, - {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + 
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = 
"pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file 
= "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = 
"pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] @@ -4496,14 +3830,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygments" -version = "2.19.2" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" -groups = ["main", "dev", "docs"] files = [ - {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, - {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] @@ -4511,19 +3844,17 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyiceberg-core" -version = "0.6.0" +version = "0.4.0" description = "" optional = true -python-versions = "~=3.9" -groups = ["main"] -markers = "extra == \"pyarrow\" or extra == \"pyiceberg-core\"" +python-versions = "*" files = [ - {file = "pyiceberg_core-0.6.0-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2f228a54a2a69912378be18f98ea866bb4a08d265c875856f99cd81f2f7299ba"}, - {file = "pyiceberg_core-0.6.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:edb41a1f182774085b11352a1f44955d561e21453f00973021244471873fbbd7"}, - {file = "pyiceberg_core-0.6.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cf869d225d57254a54bc3778841cffea4193319bc0a849767a15e05e75c9b36"}, - {file = "pyiceberg_core-0.6.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:18c12fe1ac5b4725b673cf0d1d0ab3e9475644ac0dae871a2e9a293c2622f0a8"}, - {file = "pyiceberg_core-0.6.0-cp39-abi3-win_amd64.whl", hash = "sha256:d3249eeae5e1d1f1d2c8bd8d6eced98da002afa7c48c751cb22d8dbd4b091a1e"}, - {file = "pyiceberg_core-0.6.0.tar.gz", hash = "sha256:ce2cac8cf8a85da6e682cec032165fcf387256257971f0f84bc6d50c0941f261"}, + {file = "pyiceberg_core-0.4.0-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5aec569271c96e18428d542f9b7007117a7232c06017f95cb239d42e952ad3b4"}, + {file = "pyiceberg_core-0.4.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e74773e58efa4df83aba6f6265cdd41e446fa66fa4e343ca86395fed9f209ae"}, + {file = "pyiceberg_core-0.4.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7675d21a54bf3753c740d8df78ad7efe33f438096844e479d4f3493f84830925"}, + {file = "pyiceberg_core-0.4.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7058ad935a40b1838e4cdc5febd768878c1a51f83dca005d5a52a7fa280a2489"}, + {file = "pyiceberg_core-0.4.0-cp39-abi3-win_amd64.whl", hash = "sha256:a83eb4c2307ae3dd321a9360828fb043a4add2cc9797bef0bafa20894488fb07"}, + {file = "pyiceberg_core-0.4.0.tar.gz", hash = "sha256:d2e6138707868477b806ed354aee9c476e437913a331cb9ad9ad46b4054cd11f"}, ] [[package]] @@ -4532,8 +3863,6 @@ version = "2.10.1" description = "JSON Web Token implementation in Python" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"adlfs\"" files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -4550,14 +3879,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pymdown-extensions" -version = "10.16" +version = "10.14.3" description = "Extension pack for Python Markdown." 
optional = false -python-versions = ">=3.9" -groups = ["docs"] +python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.16-py3-none-any.whl", hash = "sha256:f5dd064a4db588cb2d95229fc4ee63a1b16cc8b4d0e6145c0899ed8723da1df2"}, - {file = "pymdown_extensions-10.16.tar.gz", hash = "sha256:71dac4fca63fabeffd3eb9038b756161a33ec6e8d230853d3cecf562155ab3de"}, + {file = "pymdown_extensions-10.14.3-py3-none-any.whl", hash = "sha256:05e0bee73d64b9c71a4ae17c72abc2f700e8bc8403755a00580b49a4e9f189e9"}, + {file = "pymdown_extensions-10.14.3.tar.gz", hash = "sha256:41e576ce3f5d650be59e900e4ceff231e0aed2a88cf30acaee41e02f063a061b"}, ] [package.dependencies] @@ -4569,14 +3897,13 @@ extra = ["pygments (>=2.19.1)"] [[package]] name = "pyparsing" -version = "3.2.3" +version = "3.2.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ - {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"}, - {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"}, + {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, + {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, ] [package.extras] @@ -4588,115 +3915,19 @@ version = "1.2.0" description = "Wrappers to call pyproject.toml-based build backend hooks." optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, ] -[[package]] -name = "pyroaring" -version = "1.0.2" -description = "Library for handling efficiently sorted integer sets." 
-optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "pyroaring-1.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20967e63a7d17758e290f8121239707d086b7720de57d04844e1b0aa977786cb"}, - {file = "pyroaring-1.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6e8de37d22e7aa9a482db7153b9e2a2c966e1951ecf5aaa08cec3264ee773f9"}, - {file = "pyroaring-1.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1387a4dd91374864775fe864c058821535a7ebfa96e208622a21db962d66af15"}, - {file = "pyroaring-1.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ae367d11eb6cdf5b1afd939ebc063f5cabf0120efee794f96289eaa4962789"}, - {file = "pyroaring-1.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0f55f288c72b5326a3a64fb7104e92c71501490ab4bc5ec48f54379e3f7e655"}, - {file = "pyroaring-1.0.2-cp310-cp310-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4302399ea8b8fcfa54e5a847d8089001f3b3c2c294b5902598e1db30a749c74e"}, - {file = "pyroaring-1.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f4f98d99eb1fdd20300ea45241f6304f90d55820d1b3e70c73a09d4ef1bffc"}, - {file = "pyroaring-1.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:986c5c7bd2831bd5ffa7cb14aa7f20cbf9c762619bcf6a5a4e141d14ba4aa732"}, - {file = "pyroaring-1.0.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4c4865df32b4c980fb977140a09077d59a4f2cc86f47524c5d76b64650fec0de"}, - {file = "pyroaring-1.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d32be36debb293e8e6bfc8e51f257a1e95b9bc820e0a91196d87cfebe0575af6"}, - {file = "pyroaring-1.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6c55caa802473ce136cbbe2f51506d5046c8b71d8e265b8fb0df011f4534af44"}, - {file = "pyroaring-1.0.2-cp310-cp310-win32.whl", hash = "sha256:703f377d0c50d7bfce0734b1256140cffaa666d9b0621fe74708b1141e8a85a7"}, - {file = "pyroaring-1.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:e85e3bc78fce1aa6a83cfc3a67cd36bf02c9261e4f426b6040aa0bd97a89820d"}, - {file = "pyroaring-1.0.2-cp310-cp310-win_arm64.whl", hash = "sha256:108a808a07a7bb773205ab75f7cc22f235f8dffeea85c4bd28b6417fe5480a30"}, - {file = "pyroaring-1.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:85a79f88d94d7586921472720c9a9fcedc5db79bff310413be049c2ca3afb349"}, - {file = "pyroaring-1.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10e82996c70ed344773582be70b34e5b20078626fd2da5063ab396f12f2764ba"}, - {file = "pyroaring-1.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:76af7c7c72a61be1ed08062c8f7aed8020879ada0e77d2bcee6fb40657c32399"}, - {file = "pyroaring-1.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01cca981c5a05a4c05f8e1d5908fc50e143d9d3f68dc516525ab2e1e8eefacb6"}, - {file = "pyroaring-1.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8de91fe9bacbae5eb3dbabddc03145da1ce3ce64f421262abe9ea5e99043274"}, - {file = "pyroaring-1.0.2-cp311-cp311-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:32901ca5c5c8bc6a1aa90b006d9c75f918639f639c615bf12ba9b9d41b008d01"}, - {file = "pyroaring-1.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c56152bee9d6e2aef7535ed3874155709c73ccd4860921072a0655488abb9db8"}, - {file = "pyroaring-1.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7dbdd3f4b3efc0c6dcac67aced444ca3a13e2346ace9172f1f5a74a78128777c"}, - 
{file = "pyroaring-1.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:08a127d1a740326a9954d00c5efec56bff31695b05a798a8dcd55d17f880ca15"}, - {file = "pyroaring-1.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e87aca7ddde5eb729ff4513e44a6c3a1d844d8a50b67b21dd98420a0f47b53d3"}, - {file = "pyroaring-1.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:613fe8ade494a94de4e4c4226c7b526b2be802bb4280a983d6ec68b903130f2d"}, - {file = "pyroaring-1.0.2-cp311-cp311-win32.whl", hash = "sha256:35267f9439cccc6023a71aa929df38cda1f4146392595dc35f2ddc1263bbd56e"}, - {file = "pyroaring-1.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:bd31fece30655ad55b2ae2e0b1f397880c78a1052013a577ca4eafbcba5c1afa"}, - {file = "pyroaring-1.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:b78bfbc2c56c78cd054b3df22215e7d51144ca906e3f115e22b8eade72c7f079"}, - {file = "pyroaring-1.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d9ec623abbfcf50771e460f202d41700bdfb7dc28c6f1b0be0843dd8b61d50ac"}, - {file = "pyroaring-1.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:986a1245b22294ce5ba1b6f4299aea9a410d534718fce0fc6af6fe2251bb8981"}, - {file = "pyroaring-1.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:617b1affe6a09025a0207f6fa1d6f2266ef540c5ab9ab29287311f35825ae63a"}, - {file = "pyroaring-1.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67c0782b46e552c71d2598f50cad326446364fab02ec51b37ec26037d78263bc"}, - {file = "pyroaring-1.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47fe576f01b806c5677e08cc60d3765f872f7aeef9204914dacb3ba72067d4b0"}, - {file = "pyroaring-1.0.2-cp312-cp312-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96291b1f4f77ca13ceb3716605bcb63ae91e17c9539bd07fc77509226a7e8d1f"}, - {file = "pyroaring-1.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbf02fc3873efefe37429a91d1c384c6c259b3e5b5f4220c9436550b5c480aa3"}, - {file = "pyroaring-1.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0ce11a6d57d6d89759eeb87a727d1657cb1f03ced71d186812799e07879c568e"}, - {file = "pyroaring-1.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:decefea17be7ec3cfe461022ebb73d369f0fe07d39ba813d168267ff82a3f748"}, - {file = "pyroaring-1.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:04e6b56384aab6ca2f37e4d868b49689a0e9cf4198d8fdb2f6475a8f13d937ba"}, - {file = "pyroaring-1.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b7d74f12761ee614ebd6337641100c370bc982099fbb109cdd9b23b40ae96273"}, - {file = "pyroaring-1.0.2-cp312-cp312-win32.whl", hash = "sha256:5a5facb5d5c529f4b6967ce75f76806bf8161694dc3d5d4c0f9de3cf614b7735"}, - {file = "pyroaring-1.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:906f07c97487fd0319aa36e36f8ed54b9fdf275297b394cf36803592527b186c"}, - {file = "pyroaring-1.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:51ed8a5677c409c4b896e931b4baaa0f95f6a4c8fe56b6dc0e7f8f6728487465"}, - {file = "pyroaring-1.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f14a300354b2c23de16cc421dc54af4987cb3da4d90df9eddf3bb081a664093d"}, - {file = "pyroaring-1.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:987fc92e443dfcea57b31bd7f0b06e1d46b7361c7d1b3adf5c9d49614195e7ff"}, - {file = "pyroaring-1.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bc40e70fe9917f32776f741ce93acf1de8897260cadf27397252f1e4125a91da"}, - {file = 
"pyroaring-1.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b482f64a08bd23e3ff7a025fd431da048d4ee7b0dd6e75e83182b1ff418008b"}, - {file = "pyroaring-1.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f9b1a1d309b29de793e2958c1e9808218c449a7516f682627a18ea5782b0e2a"}, - {file = "pyroaring-1.0.2-cp313-cp313-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:cf4822333443ff7cdedce36da8365a442ad40e8c42fb1b835a589892cb08de65"}, - {file = "pyroaring-1.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3695317b9f8a95ced9ab2426fcae59f23b540c8d812178e607e090f5546f2359"}, - {file = "pyroaring-1.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:83d92fdf6c236e539fe3258375fd407b822dd7207cbed56a520d364b7cf3a516"}, - {file = "pyroaring-1.0.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2bc091dab0811359f4fbabbf771485b12e2086c5bd2f27570cca8860e56e3837"}, - {file = "pyroaring-1.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f1744c415b75e4cd3b3d26c2d7a2cda6c8f3e6a713e42751189adfe1d41954e7"}, - {file = "pyroaring-1.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:25b223c71a34e0bf4fe004312723918941337ae4770ec22e1ae43050fc45422f"}, - {file = "pyroaring-1.0.2-cp313-cp313-win32.whl", hash = "sha256:41e96f770240d773c13137250dd0c51ab898c435d48d77eae37853939a9bb402"}, - {file = "pyroaring-1.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:a73ce95a6205f2b341147619790ecc75e96a9ca265f9797fc816324df6c26c38"}, - {file = "pyroaring-1.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:3912f2ea93b27b57e914b7d082a6849ff9048644c803de8bb85d7a15cdb420bd"}, - {file = "pyroaring-1.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:44d28fe7cd95be08172e66ec75e87ffae1e77c3e4ddb0dbf4a45b7a593590a77"}, - {file = "pyroaring-1.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d60ac82595cf2f0dfce8513cc91838c589bcf6959f5ab7e1801e6efcc4d79f9"}, - {file = "pyroaring-1.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a0dda1ca2f5419c06f01a99faf4a5962bf74fe18e6b21bf563e704f00a087a2a"}, - {file = "pyroaring-1.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b72de42162bccb12bda41a9b9e68323fbfa7850973f83d1f85480dc9a57b9a3"}, - {file = "pyroaring-1.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7db9b2fa9ed2f4310f6c2c46f6b56755f15029a37964664ec02fa76156895798"}, - {file = "pyroaring-1.0.2-cp38-cp38-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:45aa198a2ae487b6c50447215525be8b226ed088f652f6593835a46dba7251e7"}, - {file = "pyroaring-1.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eddbad0004c27c47f5f088150f18824bff70d67e4868db74665acf47e1f1be96"}, - {file = "pyroaring-1.0.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:73bb24c851e1860b455421dff2b4373573ebcef62369d82e49485a1d326e0a2c"}, - {file = "pyroaring-1.0.2-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:3a4f7f3819780d86335bdbc39221b5e62cd1c224cddd4e8ba224085295ddbb9a"}, - {file = "pyroaring-1.0.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:35218c167bbd8546119da0eea18dd3297c3aa44b3ec26093a41b44fb853fbb75"}, - {file = "pyroaring-1.0.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:1768e31604e145f045d4e0a223fd7c8a5ebf6419e67581141ed673a93980cd3b"}, - {file = "pyroaring-1.0.2-cp38-cp38-win32.whl", hash = 
"sha256:2957fd752d3b39408ff197fe94c3d1d20e5c7725b9f117c97efe9be7d0dffe1e"}, - {file = "pyroaring-1.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:c2d5528e9b8162a313bac99bd920ff45b53a87faaea20edf62e903dcb5373d4f"}, - {file = "pyroaring-1.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:171bd25428136eb5d379f21185788d32d86c0bbb2a789e112ecadb80067e4918"}, - {file = "pyroaring-1.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:226dd9e63eb22fa2ad045461b79781bb6d4682e43853007cc54db197ad8e05f5"}, - {file = "pyroaring-1.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db99ede0f2292d3df142b69b87f11fd7b0beacab11ed104fff1e0db87ca52477"}, - {file = "pyroaring-1.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c057963f8b257a1f3dcf4b8bf3272bf241eaac2b7414b17ec3b7eef73b03097"}, - {file = "pyroaring-1.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeb3276817f2f29ed6712c1b5c68e80f334ad435245d19c79cbade8f848a4c04"}, - {file = "pyroaring-1.0.2-cp39-cp39-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a88d5215576a59acad827b1cc5f575cad832dc61626a0c26507d459a3099a266"}, - {file = "pyroaring-1.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f8fa9b97c5b64f13586646ca999d67a6d7064fff79a6ad044316a835e55aedb"}, - {file = "pyroaring-1.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:78ff45b76e858db13d76b13487865575cabff85ec777f3d95c3ff75395237d81"}, - {file = "pyroaring-1.0.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:48b9ac5f5b557698c53030e271b4a135090846e32b17bb414077d888aa666199"}, - {file = "pyroaring-1.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:36a2564957d17ad4c6caa62e4bb19e519d038daf5b6b7c5ef274c0d1283c71fc"}, - {file = "pyroaring-1.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f705b6126e906bf3ee43c64dcba06bde6b13854528dd5ca302cfdc388a52b31c"}, - {file = "pyroaring-1.0.2-cp39-cp39-win32.whl", hash = "sha256:08e73bcd138e7db6ed6dcceaab608db635604d94f012d11639dcea27e7048f4d"}, - {file = "pyroaring-1.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:bc905d10d7a0c269dc93f6526925deb402069c9d7d265210bb54ffd9650ab5b9"}, - {file = "pyroaring-1.0.2-cp39-cp39-win_arm64.whl", hash = "sha256:5da91936b591db7bf2165f56e47fe0696595a2c9e30c9399df814cfeb6b92c82"}, - {file = "pyroaring-1.0.2.tar.gz", hash = "sha256:aa4a1de882bf4a245c7c0dee8594b898e76501bc32afc0cc9ff4138784462839"}, -] - [[package]] name = "pyspark" -version = "3.5.6" +version = "3.5.3" description = "Apache Spark Python API" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "pyspark-3.5.6.tar.gz", hash = "sha256:f8b1c4360e41ab398c64904fae08740503bcb6bd389457d659fa6d9f2952cc48"}, + {file = "pyspark-3.5.3.tar.gz", hash = "sha256:68b7cc0c0c570a7d8644f49f40d2da8709b01d30c9126cc8cf93b4f84f3d9747"}, ] [package.dependencies] @@ -4715,7 +3946,6 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -4738,7 +3968,6 @@ version = "2.13.0" description = "check the README when running tests" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pytest_checkdocs-2.13.0-py3-none-any.whl", hash = 
"sha256:5df5bbd7e9753aa51a5f6954a301a4066bd4a04eb7e0c712c5d5d7ede1cbe153"}, {file = "pytest_checkdocs-2.13.0.tar.gz", hash = "sha256:b0e67169c543986142e15afbc17c772da87fcdb0922c7b1e4f6c60f8769f11f9"}, @@ -4758,7 +3987,6 @@ version = "0.6.3" description = "It helps to use fixtures in pytest.mark.parametrize" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "pytest-lazy-fixture-0.6.3.tar.gz", hash = "sha256:0e7d0c7f74ba33e6e80905e9bfd81f9d15ef9a790de97993e34213deb5ad10ac"}, {file = "pytest_lazy_fixture-0.6.3-py3-none-any.whl", hash = "sha256:e0b379f38299ff27a653f03eaa69b08a6fd4484e46fd1c9907d984b9f9daeda6"}, @@ -4769,14 +3997,13 @@ pytest = ">=3.2.5" [[package]] name = "pytest-mock" -version = "3.14.1" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0"}, - {file = "pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] @@ -4791,7 +4018,6 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "dev", "docs"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -4806,8 +4032,6 @@ version = "0.7.3" description = "Python library for the snappy compression library from Google" optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"snappy\"" files = [ {file = "python_snappy-0.7.3-py3-none-any.whl", hash = "sha256:074c0636cfcd97e7251330f428064050ac81a52c62ed884fc2ddebbb60ed7f50"}, {file = "python_snappy-0.7.3.tar.gz", hash = "sha256:40216c1badfb2d38ac781ecb162a1d0ec40f8ee9747e610bcfefdfa79486cee3"}, @@ -4818,46 +4042,40 @@ cramjam = "*" [[package]] name = "pytz" -version = "2025.2" +version = "2025.1" description = "World timezone definitions, modern and historical" optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"bodo\" or extra == \"pandas\" or extra == \"ray\"" files = [ - {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, - {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, ] [[package]] name = "pywin32" -version = "311" +version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" -groups = ["dev"] -markers = "sys_platform == \"win32\"" -files = [ - {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, - 
{file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, - {file = "pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b"}, - {file = "pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151"}, - {file = "pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503"}, - {file = "pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2"}, - {file = "pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31"}, - {file = "pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067"}, - {file = "pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852"}, - {file = "pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d"}, - {file = "pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d"}, - {file = "pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a"}, - {file = "pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee"}, - {file = "pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87"}, - {file = "pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42"}, - {file = "pywin32-311-cp38-cp38-win32.whl", hash = "sha256:6c6f2969607b5023b0d9ce2541f8d2cbb01c4f46bc87456017cf63b73f1e2d8c"}, - {file = "pywin32-311-cp38-cp38-win_amd64.whl", hash = "sha256:c8015b09fb9a5e188f83b7b04de91ddca4658cee2ae6f3bc483f0b21a77ef6cd"}, - {file = "pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b"}, - {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"}, - {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"}, +files = [ + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = 
"sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] [[package]] @@ -4866,7 +4084,6 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["main", "dev", "docs"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -4922,18 +4139,16 @@ files = [ {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] -markers = {main = "extra == \"ray\" or extra == \"hf\""} [[package]] name = "pyyaml-env-tag" -version = "1.1" -description = "A custom YAML tag for referencing environment variables in YAML files." +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. " optional = false -python-versions = ">=3.9" -groups = ["docs"] +python-versions = ">=3.6" files = [ - {file = "pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04"}, - {file = "pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff"}, + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, ] [package.dependencies] @@ -4941,33 +4156,31 @@ pyyaml = "*" [[package]] name = "ray" -version = "2.44.0" +version = "2.42.1" description = "Ray provides a simple, universal API for building distributed applications." 
optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"ray\"" -files = [ - {file = "ray-2.44.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:632790c327e6931a7a8ccadde8fd3afaeb73ad382f87df4dd47a52ca8bfe051c"}, - {file = "ray-2.44.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5dfbf26b30aec37e5d4425c660145e5520299a8855324686e2f17fc8601bf4c8"}, - {file = "ray-2.44.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a99fbb9ad2c1af221870d86b8a8e3d59c18a5513adde9d7088a4a275dc59da7f"}, - {file = "ray-2.44.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:b4fdbd4b2b5b45e413dc16a19a4abf5000d36c3c5854908dca4697323ff5d7e1"}, - {file = "ray-2.44.0-cp310-cp310-win_amd64.whl", hash = "sha256:0258a48e49f531f83a7c65c3482df0c6568491e35ac606a6f651fb4718e49dbb"}, - {file = "ray-2.44.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fbe4832cb2efcfc0493ea4742b4828b1eb0dabcfedf87f64be6be1d0ce874c69"}, - {file = "ray-2.44.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5337227dc9f8084280c29456988a244ca9b4ce0fbc7385d73070120f47e46979"}, - {file = "ray-2.44.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:c337237e7a8a1d8702dcf67e0a98ea8cd4ec0357d288bf0816f8990c258d8bc3"}, - {file = "ray-2.44.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:864f0a69b3cd7ca4eb7043f7f79dc9ce8b71a2c982eeec7f117f48f2846b713c"}, - {file = "ray-2.44.0-cp311-cp311-win_amd64.whl", hash = "sha256:18cef276f2789a3ed22d78274d58803e28defb66ff8d03bdce1ea8b433dea5f8"}, - {file = "ray-2.44.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:53dc75ea2b4fd869ea4a6cca9de5e02aa24f2f0d18e0a08b8a765ab2be65dd1c"}, - {file = "ray-2.44.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:398e9be193c97f734af019f0eface1f45c94195b96ecc4a647ad607650df572c"}, - {file = "ray-2.44.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:2a911e699e483ac4879110b608b06b35e602191c0e7b97326ca497c5caafe6a8"}, - {file = "ray-2.44.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:0d65ac523801e40a397bbf552f406867bb9469dd261046ca63cdc2ec3110db87"}, - {file = "ray-2.44.0-cp312-cp312-win_amd64.whl", hash = "sha256:9bb3b6df352653c4479325161a0e17e2c6b3278661c69ff842602d7440312af7"}, - {file = "ray-2.44.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:e37ef7c1294302bae9d921680a2da347988c1e1e2a982a3e72892d11ae00e23e"}, - {file = "ray-2.44.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11feb6786f820104647b66a292545536c037e8297f14fe01234b7b24dd8f2739"}, - {file = "ray-2.44.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2f18f48fc37de640315d93601026dfaa23f0af4cba8f077db13f1d77e991d9af"}, - {file = "ray-2.44.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:51ccbd5bf8045d69f1f8d2f85a92f66a82818f3d92d70c77c662757981c30d9f"}, - {file = "ray-2.44.0-cp39-cp39-win_amd64.whl", hash = "sha256:36b1470dbbac3c7cba6a5771f6ecfb696c0aacf0fe56d744154051197651f093"}, +files = [ + {file = "ray-2.42.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:b9f2f20cb2dddf52ec07e254f38ba91467b86df11330899d6ae236183e395275"}, + {file = "ray-2.42.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60727f9c72a8f71bc4e14d47dc4dc494dc59a3c4b0d108ae04fa6e5a8e45228f"}, + {file = "ray-2.42.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:90d8bf0c1afe2364a33f535636761a574e38d283b040613b8e8639be141d04a0"}, + {file = "ray-2.42.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01802249eb9cd36326e6fe0baa88916fa6062731da12506bc93e736f17111dd4"}, + {file = "ray-2.42.1-cp310-cp310-win_amd64.whl", hash 
= "sha256:d2e2f23aea57c28679b357ca88879b1b9621bbd2e1d87514509daac50294c3b1"}, + {file = "ray-2.42.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:4e81c896779d8ace66afc2ac75050806db102d9501a2ed6ea2f38010962cca7f"}, + {file = "ray-2.42.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f019514c5220a822fbc0c38ed1f7505cec75b961a7604ab677fd6477e33a2a2e"}, + {file = "ray-2.42.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:c5d79e498aceb5aa5b3e5307ec7495f58486b4266b38feea3979b9881e950c4f"}, + {file = "ray-2.42.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:cf5bc432752e29bc800e30003bd64933d785343f59a9a8c31a839cd981fc5084"}, + {file = "ray-2.42.1-cp311-cp311-win_amd64.whl", hash = "sha256:bb59a000dfc83d16e3b93f8167b7aa81d639749a0a3683d2f0f898782f0f7739"}, + {file = "ray-2.42.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:b7ef48916432a0d5cccabefc8cbd8bf0c0d2ad0b8841cce3cebd1b133996ca36"}, + {file = "ray-2.42.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a2b6f62590bb605d66d38deb495f3832a6d0301db3f496adc54d12a144541e37"}, + {file = "ray-2.42.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:9ca5c7fd5f676e8317812e77018f62f87c5b39ae0ea7f9f80d6e98cd22fdf55a"}, + {file = "ray-2.42.1-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:e0da7ffba72d3ac27507816f00f2ad334f815835f47b8b04821cc5750ec59647"}, + {file = "ray-2.42.1-cp312-cp312-win_amd64.whl", hash = "sha256:27d2fd8a945afb8c60685cab8107247a9fe43a4b2bed15f978e368341fcffb3b"}, + {file = "ray-2.42.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:12059ae21810d0ae8b09cc7c379d52cd108881b8b9e9c628d32c045970fc2ac4"}, + {file = "ray-2.42.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3228e4846502e0c5beae69b699fc9071a06d3cfbfc0ca5f2bd2707924a52e34b"}, + {file = "ray-2.42.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:57f7a98828581804a9e77ec99f3fddd54225dabda9c0d9a6771eb7d22e693072"}, + {file = "ray-2.42.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:2d40136918cdd7f60710aa46452eefa9c0e460e2c4e75b2fc5723c73de76d701"}, + {file = "ray-2.42.1-cp39-cp39-win_amd64.whl", hash = "sha256:ce642dbbf1e0f137a3f0ac91af5791bf936765abc403383efe38e0243aa7c7fb"}, ] [package.dependencies] @@ -4983,22 +4196,21 @@ pyyaml = "*" requests = "*" [package.extras] -adag = ["cupy-cuda12x ; sys_platform != \"darwin\""] -air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version 
< \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.44.0)", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -cgraph = ["cupy-cuda12x ; sys_platform != \"darwin\""] -client = ["grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\""] -cpp = ["ray-cpp (==2.44.0)"] -data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)"] -default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] -llm = ["aiohttp (>=3.7)", "aiohttp-cors", "async-timeout ; python_version < \"3.11\"", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "jsonref (>=1.1.0)", "jsonschema", "ninja", "numpy (>=1.20)", "opencensus", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "vllm (>=0.7.2)", "watchfiles"] -observability = ["memray ; sys_platform != \"win32\"", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] -rllib = ["dm-tree", "fsspec", "gymnasium (==1.0.0)", "lz4", "ormsgpack (==1.7.0)", "pandas", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pyyaml", "requests", "scipy", "tensorboardX (>=1.9)"] -serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", 
"starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -train = ["fsspec", "pandas", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "tensorboardX (>=1.9)"] -tune = ["fsspec", "pandas", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "requests", "tensorboardX (>=1.9)"] +adag = ["cupy-cuda12x"] +air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "py-spy (>=0.4.0)", "pyarrow (<18)", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==1.0.0)", "lz4", "memray", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "py-spy (>=0.4.0)", "pyOpenSSL", "pyarrow (<18)", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==1.0.0)", "lz4", "memray", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "py-spy (>=0.4.0)", "pyOpenSSL", "pyarrow (<18)", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.42.1)", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +cgraph = ["cupy-cuda12x"] +client = ["grpcio", "grpcio (!=1.56.0)"] +cpp = ["ray-cpp (==2.42.1)"] +data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (<18)", "pyarrow (>=9.0.0)"] +default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "py-spy (>=0.4.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] +observability = ["memray", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] +rllib = ["dm-tree", "fsspec", "gymnasium (==1.0.0)", "lz4", "ormsgpack (==1.7.0)", "pandas", "pyarrow (<18)", "pyarrow (>=9.0.0)", "pyyaml", 
"requests", "scipy", "tensorboardX (>=1.9)"] +serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "py-spy (>=0.4.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "py-spy (>=0.4.0)", "pyOpenSSL", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +train = ["fsspec", "pandas", "pyarrow (<18)", "pyarrow (>=9.0.0)", "requests", "tensorboardX (>=1.9)"] +tune = ["fsspec", "pandas", "pyarrow (<18)", "pyarrow (>=9.0.0)", "requests", "tensorboardX (>=1.9)"] [[package]] name = "referencing" @@ -5006,12 +4218,10 @@ version = "0.36.2" description = "JSON Referencing + Python" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, ] -markers = {main = "extra == \"ray\""} [package.dependencies] attrs = ">=22.2.0" @@ -5024,7 +4234,6 @@ version = "2024.11.6" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, @@ -5124,19 +4333,18 @@ files = [ [[package]] name = "requests" -version = "2.32.4" +version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" -groups = ["main", "dev", "docs"] files = [ - {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, - {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset_normalizer = ">=2,<4" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -5150,7 +4358,6 @@ version = "1.12.1" description = "Mock out responses from the requests package" optional = false python-versions = ">=3.5" -groups = ["dev"] files = [ {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, @@ -5168,8 +4375,6 @@ version = "2.0.0" description = "OAuthlib authentication support for Requests." 
optional = true python-versions = ">=3.4" -groups = ["main"] -markers = "extra == \"gcsfs\"" files = [ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, @@ -5184,29 +4389,28 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "requirements-parser" -version = "0.13.0" +version = "0.11.0" description = "This is a small Python module for parsing Pip requirement files." optional = false python-versions = "<4.0,>=3.8" -groups = ["dev"] files = [ - {file = "requirements_parser-0.13.0-py3-none-any.whl", hash = "sha256:2b3173faecf19ec5501971b7222d38f04cb45bb9d87d0ad629ca71e2e62ded14"}, - {file = "requirements_parser-0.13.0.tar.gz", hash = "sha256:0843119ca2cb2331de4eb31b10d70462e39ace698fd660a915c247d2301a4418"}, + {file = "requirements_parser-0.11.0-py3-none-any.whl", hash = "sha256:50379eb50311834386c2568263ae5225d7b9d0867fb55cf4ecc93959de2c2684"}, + {file = "requirements_parser-0.11.0.tar.gz", hash = "sha256:35f36dc969d14830bf459803da84f314dc3d17c802592e9e970f63d0359e5920"}, ] [package.dependencies] packaging = ">=23.2" +types-setuptools = ">=69.1.0" [[package]] name = "responses" -version = "0.25.7" +version = "0.25.6" description = "A utility library for mocking out the `requests` Python library." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "responses-0.25.7-py3-none-any.whl", hash = "sha256:92ca17416c90fe6b35921f52179bff29332076bb32694c0df02dcac2c6bc043c"}, - {file = "responses-0.25.7.tar.gz", hash = "sha256:8ebae11405d7a5df79ab6fd54277f6f2bc29b2d002d0dd2d5c632594d1ddcedb"}, + {file = "responses-0.25.6-py3-none-any.whl", hash = "sha256:9cac8f21e1193bb150ec557875377e41ed56248aed94e4567ed644db564bacf1"}, + {file = "responses-0.25.6.tar.gz", hash = "sha256:eae7ce61a9603004e76c05691e7c389e59652d91e94b419623c12bbfb8e331d8"}, ] [package.dependencies] @@ -5215,7 +4419,7 @@ requests = ">=2.30.0,<3.0" urllib3 = ">=1.25.10,<3.0" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] [[package]] name = "rfc3339-validator" @@ -5223,7 +4427,6 @@ version = "0.1.4" description = "A pure python RFC3339 validator" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -groups = ["dev"] files = [ {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, @@ -5234,189 +4437,144 @@ six = "*" [[package]] name = "rich" -version = "14.1.0" +version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" -groups = ["main"] files = [ - {file = "rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f"}, - {file = "rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8"}, + {file 
= "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.26.0" +version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] -files = [ - {file = "rpds_py-0.26.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4c70c70f9169692b36307a95f3d8c0a9fcd79f7b4a383aad5eaa0e9718b79b37"}, - {file = "rpds_py-0.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:777c62479d12395bfb932944e61e915741e364c843afc3196b694db3d669fcd0"}, - {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec671691e72dff75817386aa02d81e708b5a7ec0dec6669ec05213ff6b77e1bd"}, - {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a1cb5d6ce81379401bbb7f6dbe3d56de537fb8235979843f0d53bc2e9815a79"}, - {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f789e32fa1fb6a7bf890e0124e7b42d1e60d28ebff57fe806719abb75f0e9a3"}, - {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c55b0a669976cf258afd718de3d9ad1b7d1fe0a91cd1ab36f38b03d4d4aeaaf"}, - {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c70d9ec912802ecfd6cd390dadb34a9578b04f9bcb8e863d0a7598ba5e9e7ccc"}, - {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3021933c2cb7def39d927b9862292e0f4c75a13d7de70eb0ab06efed4c508c19"}, - {file = "rpds_py-0.26.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a7898b6ca3b7d6659e55cdac825a2e58c638cbf335cde41f4619e290dd0ad11"}, - {file = "rpds_py-0.26.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:12bff2ad9447188377f1b2794772f91fe68bb4bbfa5a39d7941fbebdbf8c500f"}, - {file = "rpds_py-0.26.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:191aa858f7d4902e975d4cf2f2d9243816c91e9605070aeb09c0a800d187e323"}, - {file = "rpds_py-0.26.0-cp310-cp310-win32.whl", hash = "sha256:b37a04d9f52cb76b6b78f35109b513f6519efb481d8ca4c321f6a3b9580b3f45"}, - {file = "rpds_py-0.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:38721d4c9edd3eb6670437d8d5e2070063f305bfa2d5aa4278c51cedcd508a84"}, - {file = "rpds_py-0.26.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9e8cb77286025bdb21be2941d64ac6ca016130bfdcd228739e8ab137eb4406ed"}, - {file = "rpds_py-0.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e09330b21d98adc8ccb2dbb9fc6cb434e8908d4c119aeaa772cb1caab5440a0"}, - {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9c1b92b774b2e68d11193dc39620d62fd8ab33f0a3c77ecdabe19c179cdbc1"}, - {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:824e6d3503ab990d7090768e4dfd9e840837bae057f212ff9f4f05ec6d1975e7"}, - {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ad7fd2258228bf288f2331f0a6148ad0186b2e3643055ed0db30990e59817a6"}, - {file = 
"rpds_py-0.26.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0dc23bbb3e06ec1ea72d515fb572c1fea59695aefbffb106501138762e1e915e"}, - {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80bf832ac7b1920ee29a426cdca335f96a2b5caa839811803e999b41ba9030d"}, - {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0919f38f5542c0a87e7b4afcafab6fd2c15386632d249e9a087498571250abe3"}, - {file = "rpds_py-0.26.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d422b945683e409000c888e384546dbab9009bb92f7c0b456e217988cf316107"}, - {file = "rpds_py-0.26.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a7711fa562ba2da1aa757e11024ad6d93bad6ad7ede5afb9af144623e5f76a"}, - {file = "rpds_py-0.26.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238e8c8610cb7c29460e37184f6799547f7e09e6a9bdbdab4e8edb90986a2318"}, - {file = "rpds_py-0.26.0-cp311-cp311-win32.whl", hash = "sha256:893b022bfbdf26d7bedb083efeea624e8550ca6eb98bf7fea30211ce95b9201a"}, - {file = "rpds_py-0.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:87a5531de9f71aceb8af041d72fc4cab4943648d91875ed56d2e629bef6d4c03"}, - {file = "rpds_py-0.26.0-cp311-cp311-win_arm64.whl", hash = "sha256:de2713f48c1ad57f89ac25b3cb7daed2156d8e822cf0eca9b96a6f990718cc41"}, - {file = "rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d"}, - {file = "rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136"}, - {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582"}, - {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e"}, - {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15"}, - {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8"}, - {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a"}, - {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323"}, - {file = "rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158"}, - {file = "rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3"}, - {file = "rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2"}, - {file = "rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44"}, - {file = "rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c"}, - {file = "rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8"}, - {file = 
"rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d"}, - {file = "rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1"}, - {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e"}, - {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7da84c2c74c0f5bc97d853d9e17bb83e2dcafcff0dc48286916001cc114379a1"}, - {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c5fe114a6dd480a510b6d3661d09d67d1622c4bf20660a474507aaee7eeeee9"}, - {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3100b3090269f3a7ea727b06a6080d4eb7439dca4c0e91a07c5d133bb1727ea7"}, - {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c03c9b0c64afd0320ae57de4c982801271c0c211aa2d37f3003ff5feb75bb04"}, - {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5963b72ccd199ade6ee493723d18a3f21ba7d5b957017607f815788cef50eaf1"}, - {file = "rpds_py-0.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da4e873860ad5bab3291438525cae80169daecbfafe5657f7f5fb4d6b3f96b9"}, - {file = "rpds_py-0.26.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5afaddaa8e8c7f1f7b4c5c725c0070b6eed0228f705b90a1732a48e84350f4e9"}, - {file = "rpds_py-0.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4916dc96489616a6f9667e7526af8fa693c0fdb4f3acb0e5d9f4400eb06a47ba"}, - {file = "rpds_py-0.26.0-cp313-cp313-win32.whl", hash = "sha256:2a343f91b17097c546b93f7999976fd6c9d5900617aa848c81d794e062ab302b"}, - {file = "rpds_py-0.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:0a0b60701f2300c81b2ac88a5fb893ccfa408e1c4a555a77f908a2596eb875a5"}, - {file = "rpds_py-0.26.0-cp313-cp313-win_arm64.whl", hash = "sha256:257d011919f133a4746958257f2c75238e3ff54255acd5e3e11f3ff41fd14256"}, - {file = "rpds_py-0.26.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:529c8156d7506fba5740e05da8795688f87119cce330c244519cf706a4a3d618"}, - {file = "rpds_py-0.26.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f53ec51f9d24e9638a40cabb95078ade8c99251945dad8d57bf4aabe86ecee35"}, - {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab504c4d654e4a29558eaa5bb8cea5fdc1703ea60a8099ffd9c758472cf913f"}, - {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd0641abca296bc1a00183fe44f7fced8807ed49d501f188faa642d0e4975b83"}, - {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b312fecc1d017b5327afa81d4da1480f51c68810963a7336d92203dbb3d4f1"}, - {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c741107203954f6fc34d3066d213d0a0c40f7bb5aafd698fb39888af277c70d8"}, - {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3e55a7db08dc9a6ed5fb7103019d2c1a38a349ac41901f9f66d7f95750942f"}, - {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e851920caab2dbcae311fd28f4313c6953993893eb5c1bb367ec69d9a39e7ed"}, - {file = "rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:dfbf280da5f876d0b00c81f26bedce274e72a678c28845453885a9b3c22ae632"}, - {file = "rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1cc81d14ddfa53d7f3906694d35d54d9d3f850ef8e4e99ee68bc0d1e5fed9a9c"}, - {file = "rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dca83c498b4650a91efcf7b88d669b170256bf8017a5db6f3e06c2bf031f57e0"}, - {file = "rpds_py-0.26.0-cp313-cp313t-win32.whl", hash = "sha256:4d11382bcaf12f80b51d790dee295c56a159633a8e81e6323b16e55d81ae37e9"}, - {file = "rpds_py-0.26.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff110acded3c22c033e637dd8896e411c7d3a11289b2edf041f86663dbc791e9"}, - {file = "rpds_py-0.26.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:da619979df60a940cd434084355c514c25cf8eb4cf9a508510682f6c851a4f7a"}, - {file = "rpds_py-0.26.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea89a2458a1a75f87caabefe789c87539ea4e43b40f18cff526052e35bbb4fdf"}, - {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feac1045b3327a45944e7dcbeb57530339f6b17baff154df51ef8b0da34c8c12"}, - {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b818a592bd69bfe437ee8368603d4a2d928c34cffcdf77c2e761a759ffd17d20"}, - {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a8b0dd8648709b62d9372fc00a57466f5fdeefed666afe3fea5a6c9539a0331"}, - {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d3498ad0df07d81112aa6ec6c95a7e7b1ae00929fb73e7ebee0f3faaeabad2f"}, - {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4146ccb15be237fdef10f331c568e1b0e505f8c8c9ed5d67759dac58ac246"}, - {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9a63785467b2d73635957d32a4f6e73d5e4df497a16a6392fa066b753e87387"}, - {file = "rpds_py-0.26.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de4ed93a8c91debfd5a047be327b7cc8b0cc6afe32a716bbbc4aedca9e2a83af"}, - {file = "rpds_py-0.26.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:caf51943715b12af827696ec395bfa68f090a4c1a1d2509eb4e2cb69abbbdb33"}, - {file = "rpds_py-0.26.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4a59e5bc386de021f56337f757301b337d7ab58baa40174fb150accd480bc953"}, - {file = "rpds_py-0.26.0-cp314-cp314-win32.whl", hash = "sha256:92c8db839367ef16a662478f0a2fe13e15f2227da3c1430a782ad0f6ee009ec9"}, - {file = "rpds_py-0.26.0-cp314-cp314-win_amd64.whl", hash = "sha256:b0afb8cdd034150d4d9f53926226ed27ad15b7f465e93d7468caaf5eafae0d37"}, - {file = "rpds_py-0.26.0-cp314-cp314-win_arm64.whl", hash = "sha256:ca3f059f4ba485d90c8dc75cb5ca897e15325e4e609812ce57f896607c1c0867"}, - {file = "rpds_py-0.26.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5afea17ab3a126006dc2f293b14ffc7ef3c85336cf451564a0515ed7648033da"}, - {file = "rpds_py-0.26.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:69f0c0a3df7fd3a7eec50a00396104bb9a843ea6d45fcc31c2d5243446ffd7a7"}, - {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:801a71f70f9813e82d2513c9a96532551fce1e278ec0c64610992c49c04c2dad"}, - {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df52098cde6d5e02fa75c1f6244f07971773adb4a26625edd5c18fee906fa84d"}, - {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:9bc596b30f86dc6f0929499c9e574601679d0341a0108c25b9b358a042f51bca"}, - {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dfbe56b299cf5875b68eb6f0ebaadc9cac520a1989cac0db0765abfb3709c19"}, - {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac64f4b2bdb4ea622175c9ab7cf09444e412e22c0e02e906978b3b488af5fde8"}, - {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ef9b6bbf9845a264f9aa45c31836e9f3c1f13be565d0d010e964c661d1e2b"}, - {file = "rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:49028aa684c144ea502a8e847d23aed5e4c2ef7cadfa7d5eaafcb40864844b7a"}, - {file = "rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e5d524d68a474a9688336045bbf76cb0def88549c1b2ad9dbfec1fb7cfbe9170"}, - {file = "rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c1851f429b822831bd2edcbe0cfd12ee9ea77868f8d3daf267b189371671c80e"}, - {file = "rpds_py-0.26.0-cp314-cp314t-win32.whl", hash = "sha256:7bdb17009696214c3b66bb3590c6d62e14ac5935e53e929bcdbc5a495987a84f"}, - {file = "rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7"}, - {file = "rpds_py-0.26.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:7a48af25d9b3c15684059d0d1fc0bc30e8eee5ca521030e2bffddcab5be40226"}, - {file = "rpds_py-0.26.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c71c2f6bf36e61ee5c47b2b9b5d47e4d1baad6426bfed9eea3e858fc6ee8806"}, - {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d815d48b1804ed7867b539236b6dd62997850ca1c91cad187f2ddb1b7bbef19"}, - {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84cfbd4d4d2cdeb2be61a057a258d26b22877266dd905809e94172dff01a42ae"}, - {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fbaa70553ca116c77717f513e08815aec458e6b69a028d4028d403b3bc84ff37"}, - {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39bfea47c375f379d8e87ab4bb9eb2c836e4f2069f0f65731d85e55d74666387"}, - {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1533b7eb683fb5f38c1d68a3c78f5fdd8f1412fa6b9bf03b40f450785a0ab915"}, - {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c5ab0ee51f560d179b057555b4f601b7df909ed31312d301b99f8b9fc6028284"}, - {file = "rpds_py-0.26.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e5162afc9e0d1f9cae3b577d9c29ddbab3505ab39012cb794d94a005825bde21"}, - {file = "rpds_py-0.26.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:43f10b007033f359bc3fa9cd5e6c1e76723f056ffa9a6b5c117cc35720a80292"}, - {file = "rpds_py-0.26.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e3730a48e5622e598293eee0762b09cff34dd3f271530f47b0894891281f051d"}, - {file = "rpds_py-0.26.0-cp39-cp39-win32.whl", hash = "sha256:4b1f66eb81eab2e0ff5775a3a312e5e2e16bf758f7b06be82fb0d04078c7ac51"}, - {file = "rpds_py-0.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:519067e29f67b5c90e64fb1a6b6e9d2ec0ba28705c51956637bac23a2f4ddae1"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3c0909c5234543ada2515c05dc08595b08d621ba919629e94427e8e03539c958"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:c1fb0cda2abcc0ac62f64e2ea4b4e64c57dfd6b885e693095460c61bde7bb18e"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d142d2d6cf9b31c12aa4878d82ed3b2324226270b89b676ac62ccd7df52d08"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a547e21c5610b7e9093d870be50682a6a6cf180d6da0f42c47c306073bfdbbf6"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35e9a70a0f335371275cdcd08bc5b8051ac494dd58bff3bbfb421038220dc871"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0dfa6115c6def37905344d56fb54c03afc49104e2ca473d5dedec0f6606913b4"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:313cfcd6af1a55a286a3c9a25f64af6d0e46cf60bc5798f1db152d97a216ff6f"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f7bf2496fa563c046d05e4d232d7b7fd61346e2402052064b773e5c378bf6f73"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:aa81873e2c8c5aa616ab8e017a481a96742fdf9313c40f14338ca7dbf50cb55f"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:68ffcf982715f5b5b7686bdd349ff75d422e8f22551000c24b30eaa1b7f7ae84"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6188de70e190847bb6db3dc3981cbadff87d27d6fe9b4f0e18726d55795cee9b"}, - {file = "rpds_py-0.26.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1c962145c7473723df9722ba4c058de12eb5ebedcb4e27e7d902920aa3831ee8"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f61a9326f80ca59214d1cceb0a09bb2ece5b2563d4e0cd37bfd5515c28510674"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:183f857a53bcf4b1b42ef0f57ca553ab56bdd170e49d8091e96c51c3d69ca696"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:941c1cfdf4799d623cf3aa1d326a6b4fdb7a5799ee2687f3516738216d2262fb"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72a8d9564a717ee291f554eeb4bfeafe2309d5ec0aa6c475170bdab0f9ee8e88"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:511d15193cbe013619dd05414c35a7dedf2088fcee93c6bbb7c77859765bd4e8"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aea1f9741b603a8d8fedb0ed5502c2bc0accbc51f43e2ad1337fe7259c2b77a5"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4019a9d473c708cf2f16415688ef0b4639e07abaa569d72f74745bbeffafa2c7"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:093d63b4b0f52d98ebae33b8c50900d3d67e0666094b1be7a12fffd7f65de74b"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2abe21d8ba64cded53a2a677e149ceb76dcf44284202d737178afe7ba540c1eb"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:4feb7511c29f8442cbbc28149a92093d32e815a28aa2c50d333826ad2a20fdf0"}, - {file = "rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:e99685fc95d386da368013e7fb4269dd39c30d99f812a8372d62f244f662709c"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a90a13408a7a856b87be8a9f008fff53c5080eea4e4180f6c2e546e4a972fb5d"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ac51b65e8dc76cf4949419c54c5528adb24fc721df722fd452e5fbc236f5c40"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59b2093224a18c6508d95cfdeba8db9cbfd6f3494e94793b58972933fcee4c6d"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f01a5d6444a3258b00dc07b6ea4733e26f8072b788bef750baa37b370266137"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6e2c12160c72aeda9d1283e612f68804621f448145a210f1bf1d79151c47090"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb28c1f569f8d33b2b5dcd05d0e6ef7005d8639c54c2f0be824f05aedf715255"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1766b5724c3f779317d5321664a343c07773c8c5fd1532e4039e6cc7d1a815be"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b6d9e5a2ed9c4988c8f9b28b3bc0e3e5b1aaa10c28d210a594ff3a8c02742daf"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:b5f7a446ddaf6ca0fad9a5535b56fbfc29998bf0e0b450d174bbec0d600e1d72"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:eed5ac260dd545fbc20da5f4f15e7efe36a55e0e7cf706e4ec005b491a9546a0"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:582462833ba7cee52e968b0341b85e392ae53d44c0f9af6a5927c80e539a8b67"}, - {file = "rpds_py-0.26.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:69a607203441e07e9a8a529cff1d5b73f6a160f22db1097211e6212a68567d11"}, - {file = "rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0"}, -] -markers = {main = "extra == \"ray\""} +files = [ + {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, + {file = 
"rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, + {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, + {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, + {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, + {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, + {file = 
"rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, + {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, + {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, + {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, + {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, + {file = 
"rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, + {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = 
"sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, + {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, + {file = 
"rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, + {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, +] [[package]] name = "rsa" -version = "4.9.1" +version = "4.9" description = "Pure-Python RSA implementation" optional = true -python-versions = "<4,>=3.6" -groups = ["main"] -markers = "extra == \"gcsfs\" or extra == \"gcp-auth\"" +python-versions = ">=3.6,<4" files = [ - {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, - {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, ] [package.dependencies] @@ -5424,21 +4582,19 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3fs" -version = "2025.5.1" +version = "2025.2.0" description = "Convenient Filesystem interface over S3" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"s3fs\"" files = [ - {file = "s3fs-2025.5.1-py3-none-any.whl", hash = "sha256:7475e7c40a3a112f17144907ffae50782ab6c03487fe0b45a9c3942bb7a5c606"}, - {file = "s3fs-2025.5.1.tar.gz", hash = "sha256:84beffa231b8ed94f8d667e93387b38351e1c4447aedea5c2c19dd88b7fcb658"}, + {file = "s3fs-2025.2.0-py3-none-any.whl", hash = "sha256:4b66b773519c1983e3071e13a42a2f2498d87da13dee40fda0622f4ed1b55664"}, + {file = "s3fs-2025.2.0.tar.gz", hash = "sha256:d94b985f55add51c655e9ca9b4ceecb5c4b6389aecde162bdebc89f489a4e9f2"}, ] [package.dependencies] aiobotocore = ">=2.5.4,<3.0.0" aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1" -fsspec = "2025.5.1" +fsspec = "==2025.2.0.*" [package.extras] awscli = ["aiobotocore[awscli] (>=2.5.4,<3.0.0)"] @@ -5446,43 +4602,40 @@ boto3 = ["aiobotocore[boto3] (>=2.5.4,<3.0.0)"] [[package]] name = "s3transfer" -version = "0.13.0" +version = "0.11.2" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">=3.9" -groups = ["main", "dev"] +python-versions = ">=3.8" files = [ - {file = "s3transfer-0.13.0-py3-none-any.whl", hash = "sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be"}, - {file = "s3transfer-0.13.0.tar.gz", hash = "sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177"}, + {file = "s3transfer-0.11.2-py3-none-any.whl", hash = "sha256:be6ecb39fadd986ef1701097771f87e4d2f821f27f6071c872143884d2950fbc"}, + {file = "s3transfer-0.11.2.tar.gz", hash = "sha256:3b39185cb72f5acc77db1a58b6e25b977f28d20496b6e58d6813d75f464d632f"}, ] -markers = {main = "extra == \"dynamodb\" or extra == \"glue\" or extra == \"rest-sigv4\""} [package.dependencies] -botocore = ">=1.37.4,<2.0a.0" +botocore = ">=1.36.0,<2.0a.0" [package.extras] -crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"] +crt = ["botocore[crt] (>=1.36.0,<2.0a.0)"] [[package]] 
name = "setuptools" -version = "80.9.0" +version = "75.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ - {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, - {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, + {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, + {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] -core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" @@ -5490,7 +4643,6 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "dev", "docs"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = 
"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -5498,14 +4650,13 @@ files = [ [[package]] name = "snowballstemmer" -version = "3.0.1" -description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" -groups = ["dev"] +python-versions = "*" files = [ - {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, - {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] [[package]] @@ -5514,7 +4665,6 @@ version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, @@ -5522,39 +4672,39 @@ files = [ [[package]] name = "sphinx" -version = "3.5.3" +version = "7.4.7" description = "Python documentation generator" optional = false -python-versions = ">=3.5" -groups = ["dev"] +python-versions = ">=3.9" files = [ - {file = "Sphinx-3.5.3-py3-none-any.whl", hash = "sha256:3f01732296465648da43dec8fb40dc451ba79eb3e2cc5c6d79005fd98197107d"}, - {file = "Sphinx-3.5.3.tar.gz", hash = "sha256:ce9c228456131bab09a3d7d10ae58474de562a6f79abb3dc811ae401cf8c1abc"}, + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, ] [package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=1.3" -colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.12" -imagesize = "*" -Jinja2 = ">=2.3" -packaging = "*" -Pygments = ">=2.0" -requests = ">=2.5.0" -setuptools = "*" -snowballstemmer = ">=1.1" +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +snowballstemmer = ">=2.2" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = "*" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.800)"] -test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast ; python_version < \"3.8\""] +lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint 
(>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] name = "sphinxcontrib-applehelp" @@ -5562,7 +4712,6 @@ version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -5579,7 +4728,6 @@ version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -5596,7 +4744,6 @@ version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -5613,7 +4760,6 @@ version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" -groups = ["dev"] files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -5628,7 +4774,6 @@ version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -5645,7 +4790,6 @@ version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -5658,72 +4802,80 @@ test = ["pytest"] [[package]] name = "sqlalchemy" -version = "2.0.43" +version = "2.0.38" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"sql-postgres\" or extra == \"sql-sqlite\"" -files = [ - {file = "SQLAlchemy-2.0.43-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:21ba7a08a4253c5825d1db389d4299f64a100ef9800e4624c8bf70d8f136e6ed"}, - {file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11b9503fa6f8721bef9b8567730f664c5a5153d25e247aadc69247c4bc605227"}, - {file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cdeff998cb294896a34e5b2f00e383e7c5c4ef3b4bfa375d9104723f15186443"}, - {file = "SQLAlchemy-2.0.43-cp37-cp37m-win32.whl", hash = "sha256:c697575d0e2b0a5f0433f679bda22f63873821d991e95a90e9e52aae517b2e32"}, - {file = "SQLAlchemy-2.0.43-cp37-cp37m-win_amd64.whl", hash = "sha256:d34c0f6dbefd2e816e8f341d0df7d4763d382e3f452423e752ffd1e213da2512"}, - {file = "sqlalchemy-2.0.43-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70322986c0c699dca241418fcf18e637a4369e0ec50540a2b907b184c8bca069"}, - {file = "sqlalchemy-2.0.43-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87accdbba88f33efa7b592dc2e8b2a9c2cdbca73db2f9d5c510790428c09c154"}, - {file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c00e7845d2f692ebfc7d5e4ec1a3fd87698e4337d09e58d6749a16aedfdf8612"}, - {file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:022e436a1cb39b13756cf93b48ecce7aa95382b9cfacceb80a7d263129dfd019"}, - {file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c5e73ba0d76eefc82ec0219d2301cb33bfe5205ed7a2602523111e2e56ccbd20"}, - {file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c2e02f06c68092b875d5cbe4824238ab93a7fa35d9c38052c033f7ca45daa18"}, - {file = "sqlalchemy-2.0.43-cp310-cp310-win32.whl", hash = "sha256:e7a903b5b45b0d9fa03ac6a331e1c1d6b7e0ab41c63b6217b3d10357b83c8b00"}, - {file = "sqlalchemy-2.0.43-cp310-cp310-win_amd64.whl", hash = "sha256:4bf0edb24c128b7be0c61cd17eef432e4bef507013292415f3fb7023f02b7d4b"}, - {file = "sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29"}, - {file = "sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631"}, - {file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685"}, - {file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca"}, - {file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d"}, - {file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3"}, - {file = "sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921"}, - {file = "sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8"}, - {file = "sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24"}, - {file = "sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83"}, - {file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9"}, - {file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48"}, - {file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687"}, - {file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe"}, - {file = "sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d"}, - {file = "sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a"}, - {file = "sqlalchemy-2.0.43-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3"}, - {file = "sqlalchemy-2.0.43-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa"}, - {file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9"}, - {file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f"}, - {file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738"}, - {file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164"}, - {file = "sqlalchemy-2.0.43-cp313-cp313-win32.whl", hash = "sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d"}, - {file = "sqlalchemy-2.0.43-cp313-cp313-win_amd64.whl", hash = "sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197"}, - {file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46293c39252f93ea0910aababa8752ad628bcce3a10d3f260648dd472256983f"}, - {file = "sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6e2bf13d9256398d037fef09fd8bf9b0bf77876e22647d10761d35593b9ac547"}, - {file = "sqlalchemy-2.0.43-cp38-cp38-win32.whl", hash = "sha256:13194276e69bb2af56198fef7909d48fd34820de01d9c92711a5fa45497cc7ed"}, - {file = "sqlalchemy-2.0.43-cp38-cp38-win_amd64.whl", hash = "sha256:334f41fa28de9f9be4b78445e68530da3c5fa054c907176460c81494f4ae1f5e"}, - {file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:413391b2239db55be14fa4223034d7e13325a1812c8396ecd4f2c08696d5ccad"}, - {file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03d73ab2a37d9e40dec4984d1813d7878e01dbdc742448d44a7341b7a9f408c7"}, - {file = "sqlalchemy-2.0.43-cp39-cp39-win32.whl", hash = "sha256:b3edaec7e8b6dc5cd94523c6df4f294014df67097c8217a89929c99975811414"}, - {file = "sqlalchemy-2.0.43-cp39-cp39-win_amd64.whl", hash = "sha256:227119ce0a89e762ecd882dc661e0aa677a690c914e358f0dd8932a2e8b2765b"}, - {file = "sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc"}, - {file = "sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417"}, +files = [ + {file = 
"SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:402c2316d95ed90d3d3c25ad0390afa52f4d2c56b348f212aa9c8d072a40eee5"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6493bc0eacdbb2c0f0d260d8988e943fee06089cd239bd7f3d0c45d1657a70e2"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0561832b04c6071bac3aad45b0d3bb6d2c4f46a8409f0a7a9c9fa6673b41bc03"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:49aa2cdd1e88adb1617c672a09bf4ebf2f05c9448c6dbeba096a3aeeb9d4d443"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-win32.whl", hash = "sha256:64aa8934200e222f72fcfd82ee71c0130a9c07d5725af6fe6e919017d095b297"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-win_amd64.whl", hash = "sha256:c57b8e0841f3fce7b703530ed70c7c36269c6d180ea2e02e36b34cb7288c50c7"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf89e0e4a30714b357f5d46b6f20e0099d38b30d45fa68ea48589faf5f12f62d"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8455aa60da49cb112df62b4721bd8ad3654a3a02b9452c783e651637a1f21fa2"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f53c0d6a859b2db58332e0e6a921582a02c1677cc93d4cbb36fdf49709b327b2"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3c4817dff8cef5697f5afe5fec6bc1783994d55a68391be24cb7d80d2dbc3a6"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9cea5b756173bb86e2235f2f871b406a9b9d722417ae31e5391ccaef5348f2c"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40e9cdbd18c1f84631312b64993f7d755d85a3930252f6276a77432a2b25a2f3"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-win32.whl", hash = "sha256:cb39ed598aaf102251483f3e4675c5dd6b289c8142210ef76ba24aae0a8f8aba"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-win_amd64.whl", hash = "sha256:f9d57f1b3061b3e21476b0ad5f0397b112b94ace21d1f439f2db472e568178ae"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12d5b06a1f3aeccf295a5843c86835033797fea292c60e72b07bcb5d820e6dd3"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e036549ad14f2b414c725349cce0772ea34a7ab008e9cd67f9084e4f371d1f32"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3bee874cb1fadee2ff2b79fc9fc808aa638670f28b2145074538d4a6a5028e"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e185ea07a99ce8b8edfc788c586c538c4b1351007e614ceb708fd01b095ef33e"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b79ee64d01d05a5476d5cceb3c27b5535e6bb84ee0f872ba60d9a8cd4d0e6579"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afd776cf1ebfc7f9aa42a09cf19feadb40a26366802d86c1fba080d8e5e74bdd"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-win32.whl", hash = "sha256:a5645cd45f56895cfe3ca3459aed9ff2d3f9aaa29ff7edf557fa7a23515a3725"}, + {file = 
"SQLAlchemy-2.0.38-cp312-cp312-win_amd64.whl", hash = "sha256:1052723e6cd95312f6a6eff9a279fd41bbae67633415373fdac3c430eca3425d"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ecef029b69843b82048c5b347d8e6049356aa24ed644006c9a9d7098c3bd3bfd"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c8bcad7fc12f0cc5896d8e10fdf703c45bd487294a986903fe032c72201596b"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0ef3f98175d77180ffdc623d38e9f1736e8d86b6ba70bff182a7e68bed7727"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ac78898c50e2574e9f938d2e5caa8fe187d7a5b69b65faa1ea4648925b096"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9eb4fa13c8c7a2404b6a8e3772c17a55b1ba18bc711e25e4d6c0c9f5f541b02a"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dba1cdb8f319084f5b00d41207b2079822aa8d6a4667c0f369fce85e34b0c86"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-win32.whl", hash = "sha256:eae27ad7580529a427cfdd52c87abb2dfb15ce2b7a3e0fc29fbb63e2ed6f8120"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-win_amd64.whl", hash = "sha256:b335a7c958bc945e10c522c069cd6e5804f4ff20f9a744dd38e748eb602cbbda"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:40310db77a55512a18827488e592965d3dec6a3f1e3d8af3f8243134029daca3"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3043375dd5bbcb2282894cbb12e6c559654c67b5fffb462fda815a55bf93f7"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70065dfabf023b155a9c2a18f573e47e6ca709b9e8619b2e04c54d5bcf193178"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c058b84c3b24812c859300f3b5abf300daa34df20d4d4f42e9652a4d1c48c8a4"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0398361acebb42975deb747a824b5188817d32b5c8f8aba767d51ad0cc7bb08d"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-win32.whl", hash = "sha256:a2bc4e49e8329f3283d99840c136ff2cd1a29e49b5624a46a290f04dff48e079"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-win_amd64.whl", hash = "sha256:9cd136184dd5f58892f24001cdce986f5d7e96059d004118d5410671579834a4"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:665255e7aae5f38237b3a6eae49d2358d83a59f39ac21036413fab5d1e810578"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:92f99f2623ff16bd4aaf786ccde759c1f676d39c7bf2855eb0b540e1ac4530c8"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa498d1392216fae47eaf10c593e06c34476ced9549657fca713d0d1ba5f7248"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9afbc3909d0274d6ac8ec891e30210563b2c8bdd52ebbda14146354e7a69373"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:57dd41ba32430cbcc812041d4de8d2ca4651aeefad2626921ae2a23deb8cd6ff"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3e35d5565b35b66905b79ca4ae85840a8d40d31e0b3e2990f2e7692071b179ca"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-win32.whl", hash = "sha256:f0d3de936b192980209d7b5149e3c98977c3810d401482d05fb6d668d53c1c63"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-win_amd64.whl", hash 
= "sha256:3868acb639c136d98107c9096303d2d8e5da2880f7706f9f8c06a7f961961149"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07258341402a718f166618470cde0c34e4cec85a39767dce4e24f61ba5e667ea"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a826f21848632add58bef4f755a33d45105d25656a0c849f2dc2df1c71f6f50"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:386b7d136919bb66ced64d2228b92d66140de5fefb3c7df6bd79069a269a7b06"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f2951dc4b4f990a4b394d6b382accb33141d4d3bd3ef4e2b27287135d6bdd68"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8bf312ed8ac096d674c6aa9131b249093c1b37c35db6a967daa4c84746bc1bc9"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6db316d6e340f862ec059dc12e395d71f39746a20503b124edc255973977b728"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-win32.whl", hash = "sha256:c09a6ea87658695e527104cf857c70f79f14e9484605e205217aae0ec27b45fc"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-win_amd64.whl", hash = "sha256:12f5c9ed53334c3ce719155424dc5407aaa4f6cadeb09c5b627e06abb93933a1"}, + {file = "SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753"}, + {file = "sqlalchemy-2.0.38.tar.gz", hash = "sha256:e5a4d82bdb4bf1ac1285a68eab02d253ab73355d9f0fe725a97e1e0fa689decb"}, ] [package.dependencies] -greenlet = {version = ">=1", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] -aioodbc = ["aioodbc", "greenlet (>=1)"] -aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (>=1)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] @@ -5734,7 +4886,7 @@ mysql-connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] @@ -5749,7 +4901,6 @@ version = "1.7.3" description = "Strict, typed YAML parser" optional = false python-versions = ">=3.7.0" -groups = ["main"] files = [ {file = "strictyaml-1.7.3-py3-none-any.whl", hash = 
"sha256:fb5c8a4edb43bebb765959e420f9b3978d7f1af88c80606c03fb420888f5d1c7"}, {file = "strictyaml-1.7.3.tar.gz", hash = "sha256:22f854a5fcab42b5ddba8030a0e4be51ca89af0267961c8d6cfa86395586c407"}, @@ -5760,14 +4911,13 @@ python-dateutil = ">=2.6.0" [[package]] name = "sympy" -version = "1.14.0" +version = "1.13.3" description = "Computer algebra system (CAS) in Python" optional = false -python-versions = ">=3.9" -groups = ["dev"] +python-versions = ">=3.8" files = [ - {file = "sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"}, - {file = "sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"}, + {file = "sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73"}, + {file = "sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9"}, ] [package.dependencies] @@ -5778,14 +4928,13 @@ dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] [[package]] name = "tenacity" -version = "9.1.2" +version = "9.0.0" description = "Retry code until it succeeds" optional = false -python-versions = ">=3.9" -groups = ["main"] +python-versions = ">=3.8" files = [ - {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, - {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, + {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, + {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, ] [package.extras] @@ -5794,16 +4943,17 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "thrift" -version = "0.22.0" +version = "0.21.0" description = "Python bindings for the Apache Thrift RPC system" optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"hive-kerberos\" or extra == \"hive\"" files = [ - {file = "thrift-0.22.0.tar.gz", hash = "sha256:42e8276afbd5f54fe1d364858b6877bc5e5a4a5ed69f6a005b94ca4918fe1466"}, + {file = "thrift-0.21.0.tar.gz", hash = "sha256:5e6f7c50f936ebfa23e924229afc95eb219f8c8e5a83202dd4a391244803e402"}, ] +[package.dependencies] +six = ">=1.7.2" + [package.extras] all = ["tornado (>=4.0)", "twisted"] tornado = ["tornado (>=4.0)"] @@ -5815,8 +4965,6 @@ version = "0.4.3" description = "Thrift SASL Python module that implements SASL transports for Thrift (`TSaslClientTransport`)." 
optional = true python-versions = "*" -groups = ["main"] -markers = "extra == \"hive-kerberos\"" files = [ {file = "thrift_sasl-0.4.3-py2.py3-none-any.whl", hash = "sha256:d24b49140115e6e2a96d08335cff225a27a28ea71866fb1b2bdb30ca5afca64e"}, {file = "thrift_sasl-0.4.3.tar.gz", hash = "sha256:5bdd5b760d90a13d9b3abfce873db0425861aa8d6bf25912d3cc0467a4f773da"}, @@ -5833,8 +4981,6 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" -groups = ["dev"] -markers = "python_version <= \"3.10\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -5876,8 +5022,6 @@ version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"daft\" or extra == \"hf\"" files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -5894,44 +5038,36 @@ slack = ["slack-sdk"] telegram = ["requests"] [[package]] -name = "typing-extensions" -version = "4.14.1" -description = "Backported and Experimental Type Hints for Python 3.9+" +name = "types-setuptools" +version = "75.8.0.20250210" +description = "Typing stubs for setuptools" optional = false python-versions = ">=3.9" -groups = ["main", "dev", "docs"] files = [ - {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, - {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, + {file = "types_setuptools-75.8.0.20250210-py3-none-any.whl", hash = "sha256:a217d7b4d59be04c29e23d142c959a0f85e71292fd3fc4313f016ca11f0b56dc"}, + {file = "types_setuptools-75.8.0.20250210.tar.gz", hash = "sha256:c1547361b2441f07c94e25dce8a068e18c611593ad4b6fdd727b1a8f5d1fda33"}, ] -markers = {docs = "python_version <= \"3.10\""} [[package]] -name = "typing-inspection" -version = "0.4.1" -description = "Runtime typing introspection tools" +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.9" -groups = ["main", "dev"] +python-versions = ">=3.8" files = [ - {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, - {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -[package.dependencies] -typing-extensions = ">=4.12.0" - [[package]] name = "tzdata" -version = "2025.2" +version = "2025.1" description = "Provider of IANA time zone data" optional = true python-versions = ">=2" -groups = ["main"] -markers = "extra == \"bodo\" or extra == \"pandas\" or extra == \"ray\"" files = [ - {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = 
"sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, - {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, ] [[package]] @@ -5940,47 +5076,42 @@ version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -groups = ["main", "dev", "docs"] -markers = "python_version < \"3.10\"" files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] -brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "urllib3" -version = "2.5.0" +version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["main", "dev", "docs"] -markers = "python_version >= \"3.10\"" files = [ - {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, - {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.31.2" +version = "20.29.2" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11"}, - {file = "virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af"}, + {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, + {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, ] [package.dependencies] @@ -5990,7 +5121,7 @@ platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "watchdog" @@ -5998,7 +5129,6 @@ version = "6.0.0" description = "Filesystem events monitoring" optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, @@ -6041,7 +5171,6 @@ version = "3.1.3" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, @@ -6059,7 +5188,6 @@ version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -6141,7 +5269,6 @@ files = [ {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, ] -markers = {main = "extra == \"s3fs\""} [[package]] name = "xmltodict" @@ -6149,7 +5276,6 @@ version = "0.14.2" description = "Makes working with XML feel like you are working with JSON" optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, @@ -6157,143 +5283,117 @@ files = [ [[package]] name = "yarl" -version = "1.20.1" +version = "1.18.3" description = "Yet another URL library" optional = true python-versions = ">=3.9" -groups = ["main"] -markers = "extra == \"adlfs\" or extra == \"gcsfs\" or extra == \"s3fs\"" -files = [ - {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4"}, - {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a"}, - {file = "yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed"}, - {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e"}, - {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73"}, - {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e"}, - {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8"}, - {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23"}, - {file = "yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70"}, - {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb"}, - {file = 
"yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2"}, - {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30"}, - {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309"}, - {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24"}, - {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13"}, - {file = "yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8"}, - {file = "yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16"}, - {file = "yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e"}, - {file = "yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b"}, - {file = "yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b"}, - {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4"}, - {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1"}, - {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833"}, - {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d"}, - {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8"}, - {file = "yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf"}, - {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e"}, - {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389"}, - {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f"}, - {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845"}, - {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1"}, - {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e"}, - {file = "yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773"}, - {file = "yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e"}, - {file = "yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9"}, - {file = "yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a"}, - {file = "yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2"}, - {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee"}, - {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819"}, - {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16"}, - {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6"}, - {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd"}, - {file = "yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a"}, - {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38"}, - {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef"}, - {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f"}, - {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8"}, - {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a"}, - {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004"}, - {file = "yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5"}, - {file = "yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698"}, - {file = "yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a"}, - {file = "yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3"}, - {file = "yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7"}, - {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691"}, - {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31"}, - {file = 
"yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28"}, - {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653"}, - {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5"}, - {file = "yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02"}, - {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53"}, - {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc"}, - {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04"}, - {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4"}, - {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b"}, - {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1"}, - {file = "yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7"}, - {file = "yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c"}, - {file = "yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d"}, - {file = "yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf"}, - {file = "yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3"}, - {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d"}, - {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c"}, - {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1"}, - {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce"}, - {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3"}, - {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be"}, - {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16"}, - {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = 
"sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513"}, - {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f"}, - {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390"}, - {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458"}, - {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e"}, - {file = "yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d"}, - {file = "yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f"}, - {file = "yarl-1.20.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3"}, - {file = "yarl-1.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b"}, - {file = "yarl-1.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983"}, - {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805"}, - {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba"}, - {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e"}, - {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723"}, - {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000"}, - {file = "yarl-1.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5"}, - {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c"}, - {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240"}, - {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee"}, - {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010"}, - {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8"}, - {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d"}, - {file = "yarl-1.20.1-cp39-cp39-win32.whl", hash = "sha256:b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06"}, - {file = "yarl-1.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00"}, - {file = "yarl-1.20.1-py3-none-any.whl", 
hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77"}, - {file = "yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac"}, +files = [ + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, + {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, + {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, + {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, + {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"}, + {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"}, + {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"}, + {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"}, + {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, + {file = 
"yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, + {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, + {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, + {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, + {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, ] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" -propcache = ">=0.2.1" +propcache = ">=0.2.0" [[package]] name = "zipp" -version = "3.23.0" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" -groups = ["dev", "docs"] files = [ - {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, - {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] -markers = {dev = "python_full_version < \"3.10.2\"", docs = "python_version < \"3.10\""} [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest 
(>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [[package]] @@ -6302,8 +5402,6 @@ version = "0.23.0" description = "Zstandard bindings for Python" optional = false python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"zstandard\"" files = [ {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, @@ -6412,20 +5510,16 @@ cffi = ["cffi (>=1.11)"] [extras] adlfs = ["adlfs"] -bodo = ["bodo"] -daft = ["daft"] -datafusion = ["datafusion"] +daft = ["getdaft"] duckdb = ["duckdb", "pyarrow"] dynamodb = ["boto3"] -gcp-auth = ["google-auth"] gcsfs = ["gcsfs"] -glue = ["boto3"] -hf = ["huggingface-hub"] +glue = ["boto3", "mypy-boto3-glue"] hive = ["thrift"] -hive-kerberos = ["kerberos", "thrift", "thrift-sasl"] +hive-kerberos = ["thrift", "thrift-sasl"] pandas = ["pandas", "pyarrow"] polars = ["polars"] -pyarrow = ["pyarrow", "pyiceberg-core"] +pyarrow = ["pyarrow"] pyiceberg-core = ["pyiceberg-core"] ray = ["pandas", "pyarrow", "ray", "ray"] rest-sigv4 = ["boto3"] @@ -6436,6 +5530,6 @@ sql-sqlite = ["sqlalchemy"] zstandard = ["zstandard"] [metadata] -lock-version = "2.1" +lock-version = "2.0" python-versions = "^3.9.2, !=3.9.7" -content-hash = "bb54e8caddecc4be1b9614edfd2fe105aa486ed7dca172917df39d6c90b33b0a" +content-hash = "ab4250d11db5c0425974f0c58af526d2ec3a2ea343948d14b81f6fa1f494cba2" diff --git a/pyiceberg/__init__.py b/pyiceberg/__init__.py index 35e3de5d23..e97de9276f 100644 --- a/pyiceberg/__init__.py +++ b/pyiceberg/__init__.py @@ -15,4 +15,4 @@ # specific language governing permissions and limitations # under the License. 
-__version__ = "0.10.0" +__version__ = "0.9.0" diff --git a/pyiceberg/avro/codecs/__init__.py b/pyiceberg/avro/codecs/__init__.py index ce592ccc5a..22e2f71cf8 100644 --- a/pyiceberg/avro/codecs/__init__.py +++ b/pyiceberg/avro/codecs/__init__.py @@ -26,9 +26,7 @@ from __future__ import annotations -from typing import Dict, Literal, Optional, Type - -from typing_extensions import TypeAlias +from typing import Dict, Optional, Type from pyiceberg.avro.codecs.bzip2 import BZip2Codec from pyiceberg.avro.codecs.codec import Codec @@ -36,17 +34,10 @@ from pyiceberg.avro.codecs.snappy_codec import SnappyCodec from pyiceberg.avro.codecs.zstandard_codec import ZStandardCodec -AvroCompressionCodec: TypeAlias = Literal["null", "bzip2", "snappy", "zstandard", "deflate"] - -AVRO_CODEC_KEY = "avro.codec" - -KNOWN_CODECS: Dict[AvroCompressionCodec, Optional[Type[Codec]]] = { +KNOWN_CODECS: Dict[str, Optional[Type[Codec]]] = { "null": None, "bzip2": BZip2Codec, "snappy": SnappyCodec, "zstandard": ZStandardCodec, "deflate": DeflateCodec, } - -# Map to convert the naming from Iceberg to Avro -CODEC_MAPPING_ICEBERG_TO_AVRO: Dict[str, str] = {"gzip": "deflate", "zstd": "zstandard"} diff --git a/pyiceberg/avro/codecs/snappy_codec.py b/pyiceberg/avro/codecs/snappy_codec.py index 2ea7229772..2da8ed8f72 100644 --- a/pyiceberg/avro/codecs/snappy_codec.py +++ b/pyiceberg/avro/codecs/snappy_codec.py @@ -51,9 +51,9 @@ def compress(data: bytes) -> tuple[bytes, int]: @staticmethod def decompress(data: bytes) -> bytes: # Compressed data includes a 4-byte CRC32 checksum - checksum = data[-4:] # store checksum before truncating data - data = data[0:-4] # remove checksum from the data + data = data[0:-4] uncompressed = snappy.decompress(data) + checksum = data[-4:] SnappyCodec._check_crc32(uncompressed, checksum) return uncompressed diff --git a/pyiceberg/avro/codecs/zstandard_codec.py b/pyiceberg/avro/codecs/zstandard_codec.py index 4cc815214f..a048f68490 100644 --- a/pyiceberg/avro/codecs/zstandard_codec.py +++ b/pyiceberg/avro/codecs/zstandard_codec.py @@ -39,7 +39,7 @@ def decompress(data: bytes) -> bytes: if not chunk: break uncompressed.extend(chunk) - return bytes(uncompressed) + return uncompressed except ImportError: diff --git a/pyiceberg/avro/encoder.py b/pyiceberg/avro/encoder.py index 899c65a164..755627e7f3 100644 --- a/pyiceberg/avro/encoder.py +++ b/pyiceberg/avro/encoder.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from typing import Any from uuid import UUID from pyiceberg.avro import STRUCT_DOUBLE, STRUCT_FLOAT @@ -75,6 +74,3 @@ def write_uuid(self, uuid: UUID) -> None: if len(uuid.bytes) != 16: raise ValueError(f"Expected UUID to have 16 bytes, got: len({uuid.bytes!r})") return self.write(uuid.bytes) - - def write_unknown(self, _: Any) -> None: - """Nulls are written as 0 bytes in avro, so we do nothing.""" diff --git a/pyiceberg/avro/file.py b/pyiceberg/avro/file.py index 82b042a412..d0da7651b7 100644 --- a/pyiceberg/avro/file.py +++ b/pyiceberg/avro/file.py @@ -35,7 +35,7 @@ TypeVar, ) -from pyiceberg.avro.codecs import AVRO_CODEC_KEY, CODEC_MAPPING_ICEBERG_TO_AVRO, KNOWN_CODECS +from pyiceberg.avro.codecs import KNOWN_CODECS from pyiceberg.avro.codecs.codec import Codec from pyiceberg.avro.decoder import BinaryDecoder, new_decoder from pyiceberg.avro.encoder import BinaryEncoder @@ -69,21 +69,15 @@ NestedField(field_id=300, name="sync", field_type=FixedType(length=SYNC_SIZE), required=True), ) +_CODEC_KEY = "avro.codec" _SCHEMA_KEY = "avro.schema" class AvroFileHeader(Record): - @property - def magic(self) -> bytes: - return self._data[0] - - @property - def meta(self) -> Dict[str, str]: - return self._data[1] - - @property - def sync(self) -> bytes: - return self._data[2] + __slots__ = ("magic", "meta", "sync") + magic: bytes + meta: Dict[str, str] + sync: bytes def compression_codec(self) -> Optional[Type[Codec]]: """Get the file's compression codec algorithm from the file's metadata. @@ -91,13 +85,11 @@ def compression_codec(self) -> Optional[Type[Codec]]: In the case of a null codec, we return a None indicating that we don't need to compress/decompress. """ - from pyiceberg.table import TableProperties - - codec_name = self.meta.get(AVRO_CODEC_KEY, TableProperties.WRITE_AVRO_COMPRESSION_DEFAULT) + codec_name = self.meta.get(_CODEC_KEY, "null") if codec_name not in KNOWN_CODECS: raise ValueError(f"Unsupported codec: {codec_name}") - return KNOWN_CODECS[codec_name] # type: ignore + return KNOWN_CODECS[codec_name] def get_schema(self) -> Schema: if _SCHEMA_KEY in self.meta: @@ -277,36 +269,11 @@ def __exit__( self.output_stream.close() def _write_header(self) -> None: - from pyiceberg.table import TableProperties - - codec_name = self.metadata.get(AVRO_CODEC_KEY, TableProperties.WRITE_AVRO_COMPRESSION_DEFAULT) - if avro_codec_name := CODEC_MAPPING_ICEBERG_TO_AVRO.get(codec_name): - codec_name = avro_codec_name - json_schema = json.dumps(AvroSchemaConversion().iceberg_to_avro(self.file_schema, schema_name=self.schema_name)) - - meta = {**self.metadata, _SCHEMA_KEY: json_schema, AVRO_CODEC_KEY: codec_name} - header = AvroFileHeader(MAGIC, meta, self.sync_bytes) + meta = {**self.metadata, _SCHEMA_KEY: json_schema, _CODEC_KEY: "null"} + header = AvroFileHeader(magic=MAGIC, meta=meta, sync=self.sync_bytes) construct_writer(META_SCHEMA).write(self.encoder, header) - def compression_codec(self) -> Optional[Type[Codec]]: - """Get the file's compression codec algorithm from the file's metadata. - - In the case of a null codec, we return a None indicating that we - don't need to compress/decompress. 
- """ - from pyiceberg.table import TableProperties - - codec_name = self.metadata.get(AVRO_CODEC_KEY, TableProperties.WRITE_AVRO_COMPRESSION_DEFAULT) - - if avro_codec_name := CODEC_MAPPING_ICEBERG_TO_AVRO.get(codec_name): - codec_name = avro_codec_name - - if codec_name not in KNOWN_CODECS: - raise ValueError(f"Unsupported codec: {codec_name}") - - return KNOWN_CODECS[codec_name] # type: ignore - def write_block(self, objects: List[D]) -> None: in_memory = io.BytesIO() block_content_encoder = BinaryEncoder(output_stream=in_memory) @@ -315,13 +282,6 @@ def write_block(self, objects: List[D]) -> None: block_content = in_memory.getvalue() self.encoder.write_int(len(objects)) - - if codec := self.compression_codec(): - content, content_length = codec.compress(block_content) - self.encoder.write_int(content_length) - self.encoder.write(content) - else: - self.encoder.write_int(len(block_content)) - self.encoder.write(block_content) - + self.encoder.write_int(len(block_content)) + self.encoder.write(block_content) self.encoder.write(self.sync_bytes) diff --git a/pyiceberg/avro/reader.py b/pyiceberg/avro/reader.py index bccc772022..a5578680d6 100644 --- a/pyiceberg/avro/reader.py +++ b/pyiceberg/avro/reader.py @@ -175,14 +175,6 @@ class TimestampReader(IntegerReader): """ -class TimestampNanoReader(IntegerReader): - """Reads a nanosecond granularity timestamp from the stream. - - Long is decoded as python integer which represents - the number of nanoseconds from the unix epoch, 1 January 1970. - """ - - class TimestamptzReader(IntegerReader): """Reads a microsecond granularity timestamptz from the stream. @@ -193,16 +185,6 @@ class TimestamptzReader(IntegerReader): """ -class TimestamptzNanoReader(IntegerReader): - """Reads a microsecond granularity timestamptz from the stream. - - Long is decoded as python integer which represents - the number of nanoseconds from the unix epoch, 1 January 1970. - - Adjusted to UTC. - """ - - class StringReader(Reader): def read(self, decoder: BinaryDecoder) -> str: return decoder.read_utf8() @@ -219,14 +201,6 @@ def skip(self, decoder: BinaryDecoder) -> None: decoder.skip(16) -class UnknownReader(Reader): - def read(self, decoder: BinaryDecoder) -> None: - return None - - def skip(self, decoder: BinaryDecoder) -> None: - pass - - @dataclass(frozen=True) class FixedReader(Reader): _len: int = dataclassfield() @@ -312,14 +286,7 @@ def skip(self, decoder: BinaryDecoder) -> None: class StructReader(Reader): - __slots__ = ( - "field_readers", - "create_struct", - "struct", - "_field_reader_functions", - "_hash", - "_max_pos", - ) + __slots__ = ("field_readers", "create_struct", "struct", "_create_with_keyword", "_field_reader_functions", "_hash") field_readers: Tuple[Tuple[Optional[int], Reader], ...] 
create_struct: Callable[..., StructProtocol] struct: StructType @@ -333,28 +300,34 @@ def __init__( ) -> None: self.field_readers = field_readers self.create_struct = create_struct - # TODO: Implement struct-reuse self.struct = struct - if not isinstance(self.create_struct(), StructProtocol): + try: + # Try initializing the struct, first with the struct keyword argument + created_struct = self.create_struct(struct=self.struct) + self._create_with_keyword = True + except TypeError as e: + if "'struct' is an invalid keyword argument for" in str(e): + created_struct = self.create_struct() + self._create_with_keyword = False + else: + raise ValueError(f"Unable to initialize struct: {self.create_struct}") from e + + if not isinstance(created_struct, StructProtocol): raise ValueError(f"Incompatible with StructProtocol: {self.create_struct}") reading_callbacks: List[Tuple[Optional[int], Callable[[BinaryDecoder], Any]]] = [] - max_pos = -1 for pos, field in field_readers: if pos is not None: reading_callbacks.append((pos, field.read)) - max_pos = max(max_pos, pos) else: reading_callbacks.append((None, field.skip)) self._field_reader_functions = tuple(reading_callbacks) self._hash = hash(self._field_reader_functions) - self._max_pos = 1 + max_pos def read(self, decoder: BinaryDecoder) -> StructProtocol: - # TODO: Implement struct-reuse - struct = self.create_struct(*[None] * self._max_pos) + struct = self.create_struct(struct=self.struct) if self._create_with_keyword else self.create_struct() for pos, field_reader in self._field_reader_functions: if pos is not None: struct[pos] = field_reader(decoder) # later: pass reuse in here diff --git a/pyiceberg/avro/resolver.py b/pyiceberg/avro/resolver.py index c4ec393513..2a53f4869b 100644 --- a/pyiceberg/avro/resolver.py +++ b/pyiceberg/avro/resolver.py @@ -44,11 +44,8 @@ StringReader, StructReader, TimeReader, - TimestampNanoReader, TimestampReader, - TimestamptzNanoReader, TimestamptzReader, - UnknownReader, UUIDReader, ) from pyiceberg.avro.writer import ( @@ -66,12 +63,9 @@ OptionWriter, StringWriter, StructWriter, - TimestampNanoWriter, - TimestamptzNanoWriter, TimestamptzWriter, TimestampWriter, TimeWriter, - UnknownWriter, UUIDWriter, Writer, ) @@ -103,12 +97,9 @@ PrimitiveType, StringType, StructType, - TimestampNanoType, TimestampType, - TimestamptzNanoType, TimestamptzType, TimeType, - UnknownType, UUIDType, ) @@ -190,15 +181,9 @@ def visit_time(self, time_type: TimeType) -> Writer: def visit_timestamp(self, timestamp_type: TimestampType) -> Writer: return TimestampWriter() - def visit_timestamp_ns(self, timestamp_ns_type: TimestampNanoType) -> Writer: - return TimestampNanoWriter() - def visit_timestamptz(self, timestamptz_type: TimestamptzType) -> Writer: return TimestamptzWriter() - def visit_timestamptz_ns(self, timestamptz_ns_type: TimestamptzNanoType) -> Writer: - return TimestamptzNanoWriter() - def visit_string(self, string_type: StringType) -> Writer: return StringWriter() @@ -208,9 +193,6 @@ def visit_uuid(self, uuid_type: UUIDType) -> Writer: def visit_binary(self, binary_type: BinaryType) -> Writer: return BinaryWriter() - def visit_unknown(self, unknown_type: UnknownType) -> Writer: - return UnknownWriter() - CONSTRUCT_WRITER_VISITOR = ConstructWriter() @@ -290,7 +272,7 @@ def struct(self, file_schema: StructType, record_struct: Optional[IcebergType], # There is a default value if file_field.write_default is not None: # The field is not in the record, but there is a write default value - results.append((None, 
DefaultWriter(writer=writer, value=file_field.write_default))) + results.append((None, DefaultWriter(writer=writer, value=file_field.write_default))) # type: ignore elif file_field.required: raise ValueError(f"Field is required, and there is no write default: {file_field}") else: @@ -344,15 +326,9 @@ def visit_time(self, time_type: TimeType, partner: Optional[IcebergType]) -> Wri def visit_timestamp(self, timestamp_type: TimestampType, partner: Optional[IcebergType]) -> Writer: return TimestampWriter() - def visit_timestamp_ns(self, timestamp_ns_type: TimestampNanoType, partner: Optional[IcebergType]) -> Writer: - return TimestampNanoWriter() - def visit_timestamptz(self, timestamptz_type: TimestamptzType, partner: Optional[IcebergType]) -> Writer: return TimestamptzWriter() - def visit_timestamptz_ns(self, timestamptz_ns_type: TimestamptzNanoType, partner: Optional[IcebergType]) -> Writer: - return TimestamptzNanoWriter() - def visit_string(self, string_type: StringType, partner: Optional[IcebergType]) -> Writer: return StringWriter() @@ -365,9 +341,6 @@ def visit_fixed(self, fixed_type: FixedType, partner: Optional[IcebergType]) -> def visit_binary(self, binary_type: BinaryType, partner: Optional[IcebergType]) -> Writer: return BinaryWriter() - def visit_unknown(self, unknown_type: UnknownType, partner: Optional[IcebergType]) -> Writer: - return UnknownWriter() - class ReadSchemaResolver(PrimitiveWithPartnerVisitor[IcebergType, Reader]): __slots__ = ("read_types", "read_enums", "context") @@ -483,15 +456,9 @@ def visit_time(self, time_type: TimeType, partner: Optional[IcebergType]) -> Rea def visit_timestamp(self, timestamp_type: TimestampType, partner: Optional[IcebergType]) -> Reader: return TimestampReader() - def visit_timestamp_ns(self, timestamp_ns_type: TimestampNanoType, partner: Optional[IcebergType]) -> Reader: - return TimestampNanoReader() - def visit_timestamptz(self, timestamptz_type: TimestamptzType, partner: Optional[IcebergType]) -> Reader: return TimestamptzReader() - def visit_timestamptz_ns(self, timestamptz_ns_type: TimestamptzNanoType, partner: Optional[IcebergType]) -> Reader: - return TimestamptzNanoReader() - def visit_string(self, string_type: StringType, partner: Optional[IcebergType]) -> Reader: return StringReader() @@ -504,9 +471,6 @@ def visit_fixed(self, fixed_type: FixedType, partner: Optional[IcebergType]) -> def visit_binary(self, binary_type: BinaryType, partner: Optional[IcebergType]) -> Reader: return BinaryReader() - def visit_unknown(self, unknown_type: UnknownType, partner: Optional[IcebergType]) -> Reader: - return UnknownReader() - class SchemaPartnerAccessor(PartnerAccessor[IcebergType]): def schema_partner(self, partner: Optional[IcebergType]) -> Optional[IcebergType]: diff --git a/pyiceberg/avro/writer.py b/pyiceberg/avro/writer.py index 6fa485f21a..b53230d3c7 100644 --- a/pyiceberg/avro/writer.py +++ b/pyiceberg/avro/writer.py @@ -32,7 +32,6 @@ List, Optional, Tuple, - Union, ) from uuid import UUID @@ -96,24 +95,12 @@ def write(self, encoder: BinaryEncoder, val: int) -> None: encoder.write_int(val) -@dataclass(frozen=True) -class TimestampNanoWriter(Writer): - def write(self, encoder: BinaryEncoder, val: int) -> None: - encoder.write_int(val) - - @dataclass(frozen=True) class TimestamptzWriter(Writer): def write(self, encoder: BinaryEncoder, val: int) -> None: encoder.write_int(val) -@dataclass(frozen=True) -class TimestamptzNanoWriter(Writer): - def write(self, encoder: BinaryEncoder, val: int) -> None: - encoder.write_int(val) - - 
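
The UUIDWriter hunk just below narrows the accepted value back to uuid.UUID and writes its raw byte representation; a quick standard-library illustration of what that payload looks like (values here are arbitrary):

from uuid import UUID, uuid4

value = UUID("12345678-1234-5678-1234-567812345678")
assert len(value.bytes) == 16            # fixed-width payload the writer emits
assert UUID(bytes=value.bytes) == value  # round-trips losslessly
assert len(uuid4().bytes) == 16          # any UUID has the same 16-byte encoding
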
@dataclass(frozen=True) class StringWriter(Writer): def write(self, encoder: BinaryEncoder, val: Any) -> None: @@ -122,17 +109,8 @@ def write(self, encoder: BinaryEncoder, val: Any) -> None: @dataclass(frozen=True) class UUIDWriter(Writer): - def write(self, encoder: BinaryEncoder, val: Union[UUID, bytes]) -> None: - if isinstance(val, UUID): - encoder.write(val.bytes) - else: - encoder.write(val) - - -@dataclass(frozen=True) -class UnknownWriter(Writer): - def write(self, encoder: BinaryEncoder, val: Any) -> None: - encoder.write_unknown(val) + def write(self, encoder: BinaryEncoder, val: UUID) -> None: + encoder.write(val.bytes) @dataclass(frozen=True) diff --git a/pyiceberg/catalog/__init__.py b/pyiceberg/catalog/__init__.py index 1607541d0b..01dd228efb 100644 --- a/pyiceberg/catalog/__init__.py +++ b/pyiceberg/catalog/__init__.py @@ -70,7 +70,6 @@ Identifier, Properties, RecursiveDict, - TableVersion, ) from pyiceberg.utils.config import Config, merge_config from pyiceberg.utils.properties import property_as_bool @@ -196,7 +195,7 @@ def infer_catalog_type(name: str, catalog_properties: RecursiveDict) -> Optional Raises: ValueError: Raises a ValueError in case properties are missing, or the wrong type. """ - if uri := catalog_properties.get(URI): + if uri := catalog_properties.get("uri"): if isinstance(uri, str): if uri.startswith("http"): return CatalogType.REST @@ -253,7 +252,7 @@ def load_catalog(name: Optional[str] = None, **properties: Optional[str]) -> Cat catalog_type = None if provided_catalog_type and isinstance(provided_catalog_type, str): - catalog_type = CatalogType(provided_catalog_type.lower()) + catalog_type = CatalogType[provided_catalog_type.upper()] elif not provided_catalog_type: catalog_type = infer_catalog_type(name, conf) @@ -263,10 +262,6 @@ def load_catalog(name: Optional[str] = None, **properties: Optional[str]) -> Cat raise ValueError(f"Could not initialize catalog with the following properties: {properties}") -def list_catalogs() -> List[str]: - return _ENV_CONFIG.get_known_catalogs() - - def delete_files(io: FileIO, files_to_delete: Set[str], file_type: str) -> None: """Delete files. @@ -744,9 +739,7 @@ def _load_file_io(self, properties: Properties = EMPTY_DICT, location: Optional[ return load_file_io({**self.properties, **properties}, location) @staticmethod - def _convert_schema_if_needed( - schema: Union[Schema, "pa.Schema"], format_version: TableVersion = TableProperties.DEFAULT_FORMAT_VERSION - ) -> Schema: + def _convert_schema_if_needed(schema: Union[Schema, "pa.Schema"]) -> Schema: if isinstance(schema, Schema): return schema try: @@ -757,10 +750,7 @@ def _convert_schema_if_needed( downcast_ns_timestamp_to_us = Config().get_bool(DOWNCAST_NS_TIMESTAMP_TO_US_ON_WRITE) or False if isinstance(schema, pa.Schema): schema: Schema = visit_pyarrow( # type: ignore - schema, - _ConvertToIcebergWithoutIDs( - downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, format_version=format_version - ), + schema, _ConvertToIcebergWithoutIDs(downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us) ) return schema except ModuleNotFoundError: @@ -853,10 +843,7 @@ def _create_staged_table( Returns: StagedTable: the created staged table instance. 
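
One detail in the load_catalog hunk above: the + side looks the catalog type up by member name (CatalogType[...upper()]) instead of by value (CatalogType(...lower())). A generic illustration of the difference, using a stand-in enum rather than PyIceberg's:

from enum import Enum

class CatalogKind(Enum):  # stand-in; not pyiceberg.catalog.CatalogType
    REST = "rest"
    HIVE = "hive"

provided = "Rest"
by_name = CatalogKind[provided.upper()]   # lookup by member name
by_value = CatalogKind(provided.lower())  # lookup by member value
assert by_name is by_value is CatalogKind.REST

# Unknown input fails either way, but with different exception types:
# CatalogKind["SQL"] raises KeyError, CatalogKind("sql") raises ValueError.
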
""" - schema: Schema = self._convert_schema_if_needed( # type: ignore - schema, - int(properties.get(TableProperties.FORMAT_VERSION, TableProperties.DEFAULT_FORMAT_VERSION)), # type: ignore - ) + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore database_name, table_name = self.identifier_to_database_and_table(identifier) @@ -936,20 +923,6 @@ def _resolve_table_location(self, location: Optional[str], database_name: str, t return location.rstrip("/") def _get_default_warehouse_location(self, database_name: str, table_name: str) -> str: - """Return the default warehouse location using the convention of `warehousePath/databaseName/tableName`.""" - database_properties = self.load_namespace_properties(database_name) - if database_location := database_properties.get(LOCATION): - database_location = database_location.rstrip("/") - return f"{database_location}/{table_name}" - - if warehouse_path := self.properties.get(WAREHOUSE_LOCATION): - warehouse_path = warehouse_path.rstrip("/") - return f"{warehouse_path}/{database_name}/{table_name}" - - raise ValueError("No default path is set, please specify a location when creating a table") - - def _get_hive_style_warehouse_location(self, database_name: str, table_name: str) -> str: - """Return the default warehouse location following the Hive convention of `warehousePath/databaseName.db/tableName`.""" database_properties = self.load_namespace_properties(database_name) if database_location := database_properties.get(LOCATION): database_location = database_location.rstrip("/") diff --git a/pyiceberg/catalog/dynamodb.py b/pyiceberg/catalog/dynamodb.py index 3b37762638..63466b0142 100644 --- a/pyiceberg/catalog/dynamodb.py +++ b/pyiceberg/catalog/dynamodb.py @@ -66,8 +66,6 @@ if TYPE_CHECKING: import pyarrow as pa - from mypy_boto3_dynamodb.client import DynamoDBClient - DYNAMODB_CLIENT = "dynamodb" @@ -96,28 +94,18 @@ class DynamoDbCatalog(MetastoreCatalog): - def __init__(self, name: str, client: Optional["DynamoDBClient"] = None, **properties: str): - """Dynamodb catalog. - - Args: - name: Name to identify the catalog. - client: An optional boto3 dynamodb client. - properties: Properties for dynamodb client construction and configuration. 
- """ + def __init__(self, name: str, **properties: str): super().__init__(name, **properties) - if client is not None: - self.dynamodb = client - else: - session = boto3.Session( - profile_name=properties.get(DYNAMODB_PROFILE_NAME), - region_name=get_first_property_value(properties, DYNAMODB_REGION, AWS_REGION), - botocore_session=properties.get(BOTOCORE_SESSION), - aws_access_key_id=get_first_property_value(properties, DYNAMODB_ACCESS_KEY_ID, AWS_ACCESS_KEY_ID), - aws_secret_access_key=get_first_property_value(properties, DYNAMODB_SECRET_ACCESS_KEY, AWS_SECRET_ACCESS_KEY), - aws_session_token=get_first_property_value(properties, DYNAMODB_SESSION_TOKEN, AWS_SESSION_TOKEN), - ) - self.dynamodb = session.client(DYNAMODB_CLIENT) + session = boto3.Session( + profile_name=properties.get(DYNAMODB_PROFILE_NAME), + region_name=get_first_property_value(properties, DYNAMODB_REGION, AWS_REGION), + botocore_session=properties.get(BOTOCORE_SESSION), + aws_access_key_id=get_first_property_value(properties, DYNAMODB_ACCESS_KEY_ID, AWS_ACCESS_KEY_ID), + aws_secret_access_key=get_first_property_value(properties, DYNAMODB_SECRET_ACCESS_KEY, AWS_SECRET_ACCESS_KEY), + aws_session_token=get_first_property_value(properties, DYNAMODB_SESSION_TOKEN, AWS_SESSION_TOKEN), + ) + self.dynamodb = session.client(DYNAMODB_CLIENT) self.dynamodb_table_name = self.properties.get(DYNAMODB_TABLE_NAME, DYNAMODB_TABLE_NAME_DEFAULT) self._ensure_catalog_table_exists_or_create() @@ -664,10 +652,6 @@ def _convert_dynamo_table_item_to_iceberg_table(self, dynamo_table_item: Dict[st catalog=self, ) - def _get_default_warehouse_location(self, database_name: str, table_name: str) -> str: - """Override the default warehouse location to follow Hive-style conventions.""" - return self._get_hive_style_warehouse_location(database_name, table_name) - def _get_create_table_item(database_name: str, table_name: str, properties: Properties, metadata_location: str) -> Dict[str, Any]: current_timestamp_ms = str(round(time() * 1000)) @@ -840,9 +824,7 @@ def _convert_dynamo_item_to_regular_dict(dynamo_json: Dict[str, Any]) -> Dict[st raise ValueError("Only S and N data types are supported.") values = list(val_dict.values()) - if len(values) != 1: - raise ValueError(f"Expecting only 1 value: {values}") - + assert len(values) == 1 column_value = values[0] regular_json[column_name] = column_value diff --git a/pyiceberg/catalog/glue.py b/pyiceberg/catalog/glue.py index 4eb4164e57..4f0a9061df 100644 --- a/pyiceberg/catalog/glue.py +++ b/pyiceberg/catalog/glue.py @@ -30,6 +30,15 @@ import boto3 from botocore.config import Config +from mypy_boto3_glue.client import GlueClient +from mypy_boto3_glue.type_defs import ( + ColumnTypeDef, + DatabaseInputTypeDef, + DatabaseTypeDef, + StorageDescriptorTypeDef, + TableInputTypeDef, + TableTypeDef, +) from pyiceberg.catalog import ( BOTOCORE_SESSION, @@ -92,15 +101,6 @@ if TYPE_CHECKING: import pyarrow as pa - from mypy_boto3_glue.client import GlueClient - from mypy_boto3_glue.type_defs import ( - ColumnTypeDef, - DatabaseInputTypeDef, - DatabaseTypeDef, - StorageDescriptorTypeDef, - TableInputTypeDef, - TableTypeDef, - ) # There is a unique Glue metastore in each AWS account and each AWS region. 
By default, GlueCatalog chooses the Glue @@ -140,20 +140,12 @@ def _construct_parameters( - metadata_location: str, - glue_table: Optional["TableTypeDef"] = None, - prev_metadata_location: Optional[str] = None, - metadata_properties: Optional[Properties] = None, + metadata_location: str, glue_table: Optional[TableTypeDef] = None, prev_metadata_location: Optional[str] = None ) -> Properties: new_parameters = glue_table.get("Parameters", {}) if glue_table else {} new_parameters.update({TABLE_TYPE: ICEBERG.upper(), METADATA_LOCATION: metadata_location}) if prev_metadata_location: new_parameters[PREVIOUS_METADATA_LOCATION] = prev_metadata_location - - if metadata_properties: - for key, value in metadata_properties.items(): - new_parameters[key] = str(value) - return new_parameters @@ -198,15 +190,15 @@ def primitive(self, primitive: PrimitiveType) -> str: return GLUE_PRIMITIVE_TYPES[primitive_type] -def _to_columns(metadata: TableMetadata) -> List["ColumnTypeDef"]: - results: Dict[str, "ColumnTypeDef"] = {} +def _to_columns(metadata: TableMetadata) -> List[ColumnTypeDef]: + results: Dict[str, ColumnTypeDef] = {} def _append_to_results(field: NestedField, is_current: bool) -> None: if field.name in results: return results[field.name] = cast( - "ColumnTypeDef", + ColumnTypeDef, { "Name": field.name, "Type": visit(field.field_type, _IcebergSchemaToGlueType()), @@ -238,13 +230,13 @@ def _construct_table_input( metadata_location: str, properties: Properties, metadata: TableMetadata, - glue_table: Optional["TableTypeDef"] = None, + glue_table: Optional[TableTypeDef] = None, prev_metadata_location: Optional[str] = None, -) -> "TableInputTypeDef": - table_input: "TableInputTypeDef" = { +) -> TableInputTypeDef: + table_input: TableInputTypeDef = { "Name": table_name, "TableType": EXTERNAL_TABLE, - "Parameters": _construct_parameters(metadata_location, glue_table, prev_metadata_location, properties), + "Parameters": _construct_parameters(metadata_location, glue_table, prev_metadata_location), "StorageDescriptor": { "Columns": _to_columns(metadata), "Location": metadata.location, @@ -257,12 +249,10 @@ def _construct_table_input( return table_input -def _construct_rename_table_input(to_table_name: str, glue_table: "TableTypeDef") -> "TableInputTypeDef": - rename_table_input: "TableInputTypeDef" = {"Name": to_table_name} +def _construct_rename_table_input(to_table_name: str, glue_table: TableTypeDef) -> TableInputTypeDef: + rename_table_input: TableInputTypeDef = {"Name": to_table_name} # use the same Glue info to create the new table, pointing to the old metadata - if not glue_table["TableType"]: - raise ValueError("Glue table type is missing, cannot rename table") - + assert glue_table["TableType"] rename_table_input["TableType"] = glue_table["TableType"] if "Owner" in glue_table: rename_table_input["Owner"] = glue_table["Owner"] @@ -274,7 +264,7 @@ def _construct_rename_table_input(to_table_name: str, glue_table: "TableTypeDef" # It turns out the output of StorageDescriptor is not the same as the input type # because the Column can have a different type, but for now it seems to work, so # silence the type error. 
- rename_table_input["StorageDescriptor"] = cast("StorageDescriptorTypeDef", glue_table["StorageDescriptor"]) + rename_table_input["StorageDescriptor"] = cast(StorageDescriptorTypeDef, glue_table["StorageDescriptor"]) if "Description" in glue_table: rename_table_input["Description"] = glue_table["Description"] @@ -282,8 +272,8 @@ def _construct_rename_table_input(to_table_name: str, glue_table: "TableTypeDef" return rename_table_input -def _construct_database_input(database_name: str, properties: Properties) -> "DatabaseInputTypeDef": - database_input: "DatabaseInputTypeDef" = {"Name": database_name} +def _construct_database_input(database_name: str, properties: Properties) -> DatabaseInputTypeDef: + database_input: DatabaseInputTypeDef = {"Name": database_name} parameters = {} for k, v in properties.items(): if k == "Description": @@ -296,7 +286,7 @@ def _construct_database_input(database_name: str, properties: Properties) -> "Da return database_input -def _register_glue_catalog_id_with_glue_client(glue: "GlueClient", glue_catalog_id: str) -> None: +def _register_glue_catalog_id_with_glue_client(glue: GlueClient, glue_catalog_id: str) -> None: """ Register the Glue Catalog ID (AWS Account ID) as a parameter on all Glue client methods. @@ -313,58 +303,39 @@ def add_glue_catalog_id(params: Dict[str, str], **kwargs: Any) -> None: class GlueCatalog(MetastoreCatalog): - glue: "GlueClient" - - def __init__(self, name: str, client: Optional["GlueClient"] = None, **properties: Any): - """Glue Catalog. - - You either need to provide a boto3 glue client, or one will be constructed from the properties. - - Args: - name: Name to identify the catalog. - client: An optional boto3 glue client. - properties: Properties for glue client construction and configuration. 
- """ + def __init__(self, name: str, **properties: Any): super().__init__(name, **properties) - if client is not None: - self.glue = client - else: - retry_mode_prop_value = get_first_property_value(properties, GLUE_RETRY_MODE) - - session = boto3.Session( - profile_name=properties.get(GLUE_PROFILE_NAME), - region_name=get_first_property_value(properties, GLUE_REGION, AWS_REGION), - botocore_session=properties.get(BOTOCORE_SESSION), - aws_access_key_id=get_first_property_value(properties, GLUE_ACCESS_KEY_ID, AWS_ACCESS_KEY_ID), - aws_secret_access_key=get_first_property_value(properties, GLUE_SECRET_ACCESS_KEY, AWS_SECRET_ACCESS_KEY), - aws_session_token=get_first_property_value(properties, GLUE_SESSION_TOKEN, AWS_SESSION_TOKEN), - ) - self.glue: GlueClient = session.client( - "glue", - endpoint_url=properties.get(GLUE_CATALOG_ENDPOINT), - config=Config( - retries={ - "max_attempts": properties.get(GLUE_MAX_RETRIES, MAX_RETRIES), - "mode": retry_mode_prop_value if retry_mode_prop_value in EXISTING_RETRY_MODES else STANDARD_RETRY_MODE, - } - ), - ) - - if glue_catalog_id := properties.get(GLUE_ID): - _register_glue_catalog_id_with_glue_client(self.glue, glue_catalog_id) + retry_mode_prop_value = get_first_property_value(properties, GLUE_RETRY_MODE) - def _convert_glue_to_iceberg(self, glue_table: "TableTypeDef") -> Table: - properties: Properties = glue_table["Parameters"] + session = boto3.Session( + profile_name=properties.get(GLUE_PROFILE_NAME), + region_name=get_first_property_value(properties, GLUE_REGION, AWS_REGION), + botocore_session=properties.get(BOTOCORE_SESSION), + aws_access_key_id=get_first_property_value(properties, GLUE_ACCESS_KEY_ID, AWS_ACCESS_KEY_ID), + aws_secret_access_key=get_first_property_value(properties, GLUE_SECRET_ACCESS_KEY, AWS_SECRET_ACCESS_KEY), + aws_session_token=get_first_property_value(properties, GLUE_SESSION_TOKEN, AWS_SESSION_TOKEN), + ) + self.glue: GlueClient = session.client( + "glue", + endpoint_url=properties.get(GLUE_CATALOG_ENDPOINT), + config=Config( + retries={ + "max_attempts": properties.get(GLUE_MAX_RETRIES, MAX_RETRIES), + "mode": retry_mode_prop_value if retry_mode_prop_value in EXISTING_RETRY_MODES else STANDARD_RETRY_MODE, + } + ), + ) - database_name = glue_table.get("DatabaseName", None) - if database_name is None: - raise ValueError("Glue table is missing DatabaseName property") + if glue_catalog_id := properties.get(GLUE_ID): + _register_glue_catalog_id_with_glue_client(self.glue, glue_catalog_id) - parameters = glue_table.get("Parameters", None) - if parameters is None: - raise ValueError("Glue table is missing Parameters property") + def _convert_glue_to_iceberg(self, glue_table: TableTypeDef) -> Table: + properties: Properties = glue_table["Parameters"] + assert glue_table["DatabaseName"] + assert glue_table["Parameters"] + database_name = glue_table["DatabaseName"] table_name = glue_table["Name"] if TABLE_TYPE not in properties: @@ -395,7 +366,7 @@ def _convert_glue_to_iceberg(self, glue_table: "TableTypeDef") -> Table: catalog=self, ) - def _create_glue_table(self, database_name: str, table_name: str, table_input: "TableInputTypeDef") -> None: + def _create_glue_table(self, database_name: str, table_name: str, table_input: TableInputTypeDef) -> None: try: self.glue.create_table(DatabaseName=database_name, TableInput=table_input) except self.glue.exceptions.AlreadyExistsException as e: @@ -403,7 +374,7 @@ def _create_glue_table(self, database_name: str, table_name: str, table_input: " except 
self.glue.exceptions.EntityNotFoundException as e: raise NoSuchNamespaceError(f"Database {database_name} does not exist") from e - def _update_glue_table(self, database_name: str, table_name: str, table_input: "TableInputTypeDef", version_id: str) -> None: + def _update_glue_table(self, database_name: str, table_name: str, table_input: TableInputTypeDef, version_id: str) -> None: try: self.glue.update_table( DatabaseName=database_name, @@ -418,7 +389,7 @@ def _update_glue_table(self, database_name: str, table_name: str, table_input: " f"Cannot commit {database_name}.{table_name} because Glue detected concurrent update to table version {version_id}" ) from e - def _get_glue_table(self, database_name: str, table_name: str) -> "TableTypeDef": + def _get_glue_table(self, database_name: str, table_name: str) -> TableTypeDef: try: load_table_response = self.glue.get_table(DatabaseName=database_name, Name=table_name) return load_table_response["Table"] @@ -511,7 +482,7 @@ def commit_table( table_identifier = table.name() database_name, table_name = self.identifier_to_database_and_table(table_identifier, NoSuchTableError) - current_glue_table: Optional["TableTypeDef"] + current_glue_table: Optional[TableTypeDef] glue_table_version_id: Optional[str] current_table: Optional[Table] try: @@ -695,19 +666,13 @@ def drop_namespace(self, namespace: Union[str, Identifier]) -> None: """ database_name = self.identifier_to_database(namespace, NoSuchNamespaceError) try: - table_list_response = self.glue.get_tables(DatabaseName=database_name) - table_list = table_list_response["TableList"] - except self.glue.exceptions.EntityNotFoundException as e: + table_list = self.list_tables(namespace=database_name) + except NoSuchNamespaceError as e: raise NoSuchNamespaceError(f"Database does not exist: {database_name}") from e if len(table_list) > 0: - first_table = table_list[0] - if self.__is_iceberg_table(first_table): - raise NamespaceNotEmptyError(f"Cannot drop namespace {database_name} because it still contains Iceberg tables") - else: - raise NamespaceNotEmptyError( - f"Cannot drop namespace {database_name} because it still contains non-Iceberg tables" - ) + raise NamespaceNotEmptyError(f"Database {database_name} is not empty") + self.glue.delete_database(Name=database_name) def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: @@ -723,7 +688,7 @@ def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: NoSuchNamespaceError: If a namespace with the given name does not exist, or the identifier is invalid. 
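
Further down, the hive.py hunk rewrites _HiveClient so that the Thrift transport is opened in __enter__ and closed in __exit__. The shape of that pattern, reduced to a dependency-free sketch:

from types import TracebackType
from typing import Optional, Type

class Connection:
    """Stand-in for a Thrift transport plus metastore client."""

    def __init__(self, uri: str) -> None:
        self.uri = uri
        self.is_open = False

    def __enter__(self) -> "Connection":
        # Open once per `with` block, mirroring _HiveClient.__enter__.
        self.is_open = True
        return self

    def __exit__(
        self,
        exctype: Optional[Type[BaseException]],
        excinst: Optional[BaseException],
        exctb: Optional[TracebackType],
    ) -> None:
        # Always close, even if the body raised, mirroring _HiveClient.__exit__.
        self.is_open = False

with Connection("thrift://localhost:9083") as conn:
    assert conn.is_open
assert not conn.is_open
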
""" database_name = self.identifier_to_database(namespace, NoSuchNamespaceError) - table_list: List["TableTypeDef"] = [] + table_list: List[TableTypeDef] = [] next_token: Optional[str] = None try: while True: @@ -751,7 +716,7 @@ def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identi if namespace: return [] - database_list: List["DatabaseTypeDef"] = [] + database_list: List[DatabaseTypeDef] = [] next_token: Optional[str] = None while True: @@ -827,9 +792,5 @@ def view_exists(self, identifier: Union[str, Identifier]) -> bool: raise NotImplementedError @staticmethod - def __is_iceberg_table(table: "TableTypeDef") -> bool: + def __is_iceberg_table(table: TableTypeDef) -> bool: return table.get("Parameters", {}).get(TABLE_TYPE, "").lower() == ICEBERG - - def _get_default_warehouse_location(self, database_name: str, table_name: str) -> str: - """Override the default warehouse location to follow Hive-style conventions.""" - return self._get_hive_style_warehouse_location(database_name, table_name) diff --git a/pyiceberg/catalog/hive.py b/pyiceberg/catalog/hive.py index eef6bbad18..7189b12c13 100644 --- a/pyiceberg/catalog/hive.py +++ b/pyiceberg/catalog/hive.py @@ -36,7 +36,6 @@ from hive_metastore.ttypes import ( AlreadyExistsException, CheckLockRequest, - EnvironmentContext, FieldSchema, InvalidOperationException, LockComponent, @@ -63,7 +62,6 @@ LOCATION, METADATA_LOCATION, TABLE_TYPE, - URI, MetastoreCatalog, PropertiesUpdateSummary, ) @@ -112,7 +110,6 @@ TimestampType, TimestamptzType, TimeType, - UnknownType, UUIDType, ) from pyiceberg.utils.properties import property_as_bool, property_as_float @@ -130,8 +127,6 @@ HIVE_KERBEROS_AUTH = "hive.kerberos-authentication" HIVE_KERBEROS_AUTH_DEFAULT = False -HIVE_KERBEROS_SERVICE_NAME = "hive.kerberos-service-name" -HIVE_KERBEROS_SERVICE_NAME_DEFAULT = "hive" LOCK_CHECK_MIN_WAIT_TIME = "lock-check-min-wait-time" LOCK_CHECK_MAX_WAIT_TIME = "lock-check-max-wait-time" @@ -139,8 +134,6 @@ DEFAULT_LOCK_CHECK_MIN_WAIT_TIME = 0.1 # 100 milliseconds DEFAULT_LOCK_CHECK_MAX_WAIT_TIME = 60 # 1 min DEFAULT_LOCK_CHECK_RETRIES = 4 -DO_NOT_UPDATE_STATS = "DO_NOT_UPDATE_STATS" -DO_NOT_UPDATE_STATS_DEFAULT = "true" logger = logging.getLogger(__name__) @@ -149,53 +142,40 @@ class _HiveClient: """Helper class to nicely open and close the transport.""" _transport: TTransport + _client: Client _ugi: Optional[List[str]] - def __init__( - self, - uri: str, - ugi: Optional[str] = None, - kerberos_auth: Optional[bool] = HIVE_KERBEROS_AUTH_DEFAULT, - kerberos_service_name: Optional[str] = HIVE_KERBEROS_SERVICE_NAME, - ): + def __init__(self, uri: str, ugi: Optional[str] = None, kerberos_auth: Optional[bool] = HIVE_KERBEROS_AUTH_DEFAULT): self._uri = uri self._kerberos_auth = kerberos_auth - self._kerberos_service_name = kerberos_service_name self._ugi = ugi.split(":") if ugi else None - self._transport = self._init_thrift_transport() - def _init_thrift_transport(self) -> TTransport: + self._init_thrift_client() + + def _init_thrift_client(self) -> None: url_parts = urlparse(self._uri) + socket = TSocket.TSocket(url_parts.hostname, url_parts.port) + if not self._kerberos_auth: - return TTransport.TBufferedTransport(socket) + self._transport = TTransport.TBufferedTransport(socket) else: - return TTransport.TSaslClientTransport(socket, host=url_parts.hostname, service=self._kerberos_service_name) + self._transport = TTransport.TSaslClientTransport(socket, host=url_parts.hostname, service="hive") - def _client(self) -> Client: protocol = 
TBinaryProtocol.TBinaryProtocol(self._transport) - client = Client(protocol) - if self._ugi: - client.set_ugi(*self._ugi) - return client + + self._client = Client(protocol) def __enter__(self) -> Client: - """Make sure the transport is initialized and open.""" - if not self._transport.isOpen(): - try: - self._transport.open() - except (TypeError, TTransport.TTransportException): - # reinitialize _transport - self._transport = self._init_thrift_transport() - self._transport.open() - return self._client() # recreate the client + self._transport.open() + if self._ugi: + self._client.set_ugi(*self._ugi) + return self._client def __exit__( self, exctype: Optional[Type[BaseException]], excinst: Optional[BaseException], exctb: Optional[TracebackType] ) -> None: - """Close transport if it was opened.""" - if self._transport.isOpen(): - self._transport.close() + self._transport.close() def _construct_hive_storage_descriptor( @@ -221,18 +201,11 @@ def _construct_hive_storage_descriptor( DEFAULT_PROPERTIES = {TableProperties.PARQUET_COMPRESSION: TableProperties.PARQUET_COMPRESSION_DEFAULT} -def _construct_parameters( - metadata_location: str, previous_metadata_location: Optional[str] = None, metadata_properties: Optional[Properties] = None -) -> Dict[str, Any]: +def _construct_parameters(metadata_location: str, previous_metadata_location: Optional[str] = None) -> Dict[str, Any]: properties = {PROP_EXTERNAL: "TRUE", PROP_TABLE_TYPE: "ICEBERG", PROP_METADATA_LOCATION: metadata_location} if previous_metadata_location: properties[PROP_PREVIOUS_METADATA_LOCATION] = previous_metadata_location - if metadata_properties: - for key, value in metadata_properties.items(): - if key not in properties: - properties[key] = str(value) - return properties @@ -263,7 +236,6 @@ def _annotate_namespace(database: HiveDatabase, properties: Properties) -> HiveD UUIDType: "string", BinaryType: "binary", FixedType: "binary", - UnknownType: "void", } @@ -317,20 +289,19 @@ def __init__(self, name: str, **properties: str): @staticmethod def _create_hive_client(properties: Dict[str, str]) -> _HiveClient: last_exception = None - for uri in properties[URI].split(","): + for uri in properties["uri"].split(","): try: return _HiveClient( uri, properties.get("ugi"), property_as_bool(properties, HIVE_KERBEROS_AUTH, HIVE_KERBEROS_AUTH_DEFAULT), - properties.get(HIVE_KERBEROS_SERVICE_NAME, HIVE_KERBEROS_SERVICE_NAME_DEFAULT), ) except BaseException as e: last_exception = e if last_exception is not None: raise last_exception else: - raise ValueError(f"Unable to connect to hive using uri: {properties[URI]}") + raise ValueError(f"Unable to connect to hive using uri: {properties['uri']}") def _convert_hive_into_iceberg(self, table: HiveTable) -> Table: properties: Dict[str, str] = table.parameters @@ -378,7 +349,7 @@ def _convert_iceberg_into_hive(self, table: Table) -> HiveTable: property_as_bool(self.properties, HIVE2_COMPATIBLE, HIVE2_COMPATIBLE_DEFAULT), ), tableType=EXTERNAL_TABLE, - parameters=_construct_parameters(metadata_location=table.metadata_location, metadata_properties=table.properties), + parameters=_construct_parameters(table.metadata_location), ) def _create_hive_table(self, open_client: Client, hive_table: HiveTable) -> None: @@ -559,20 +530,8 @@ def commit_table( hive_table.parameters = _construct_parameters( metadata_location=updated_staged_table.metadata_location, previous_metadata_location=current_table.metadata_location, - metadata_properties=updated_staged_table.properties, - ) - # Update hive's schema and properties - 
hive_table.sd = _construct_hive_storage_descriptor( - updated_staged_table.schema(), - updated_staged_table.location(), - property_as_bool(updated_staged_table.properties, HIVE2_COMPATIBLE, HIVE2_COMPATIBLE_DEFAULT), - ) - open_client.alter_table_with_environment_context( - dbname=database_name, - tbl_name=table_name, - new_tbl=hive_table, - environment_context=EnvironmentContext(properties={DO_NOT_UPDATE_STATS: DO_NOT_UPDATE_STATS_DEFAULT}), ) + open_client.alter_table(dbname=database_name, tbl_name=table_name, new_tbl=hive_table) else: # Table does not exist, create it. hive_table = self._convert_iceberg_into_hive( @@ -659,12 +618,7 @@ def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: U tbl = open_client.get_table(dbname=from_database_name, tbl_name=from_table_name) tbl.dbName = to_database_name tbl.tableName = to_table_name - open_client.alter_table_with_environment_context( - dbname=from_database_name, - tbl_name=from_table_name, - new_tbl=tbl, - environment_context=EnvironmentContext(properties={DO_NOT_UPDATE_STATS: DO_NOT_UPDATE_STATS_DEFAULT}), - ) + open_client.alter_table(dbname=from_database_name, tbl_name=from_table_name, new_tbl=tbl) except NoSuchObjectException as e: raise NoSuchTableError(f"Table does not exist: {from_table_name}") from e except InvalidOperationException as e: @@ -800,7 +754,7 @@ def update_namespace_properties( if removals: for key in removals: if key in parameters: - parameters.pop(key) + parameters[key] = None removed.add(key) if updates: for key, value in updates.items(): @@ -815,7 +769,3 @@ def update_namespace_properties( def drop_view(self, identifier: Union[str, Identifier]) -> None: raise NotImplementedError - - def _get_default_warehouse_location(self, database_name: str, table_name: str) -> str: - """Override the default warehouse location to follow Hive-style conventions.""" - return self._get_hive_style_warehouse_location(database_name, table_name) diff --git a/pyiceberg/catalog/memory.py b/pyiceberg/catalog/memory.py index 024d14fba6..7d6053baaf 100644 --- a/pyiceberg/catalog/memory.py +++ b/pyiceberg/catalog/memory.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. -from pyiceberg.catalog import URI from pyiceberg.catalog.sql import SqlCatalog @@ -28,6 +27,6 @@ class InMemoryCatalog(SqlCatalog): def __init__(self, name: str, warehouse: str = "file:///tmp/iceberg/warehouse", **kwargs: str) -> None: self._warehouse_location = warehouse - if URI not in kwargs: - kwargs[URI] = "sqlite:///:memory:" + if "uri" not in kwargs: + kwargs["uri"] = "sqlite:///:memory:" super().__init__(name=name, warehouse=warehouse, **kwargs) diff --git a/pyiceberg/catalog/rest/__init__.py b/pyiceberg/catalog/rest.py similarity index 80% rename from pyiceberg/catalog/rest/__init__.py rename to pyiceberg/catalog/rest.py index 6a1868e58a..6d91124801 100644 --- a/pyiceberg/catalog/rest/__init__.py +++ b/pyiceberg/catalog/rest.py @@ -15,46 +15,51 @@ # specific language governing permissions and limitations # under the License. 
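
Much of the rest.py hunk below re-inlines _handle_non_200_response, which maps HTTP status codes onto catalog exceptions with per-call overrides. A condensed sketch of that dispatch; the exception classes here are placeholders, not the ones from pyiceberg.exceptions:

from typing import Dict, Type

class RESTError(Exception): ...
class UnauthorizedError(RESTError): ...
class ServerError(RESTError): ...
class NoSuchTableError(RESTError): ...

# Fallbacks applied when the caller does not override a status code.
_DEFAULT_ERRORS: Dict[int, Type[Exception]] = {401: UnauthorizedError}

def exception_for_status(code: int, overrides: Dict[int, Type[Exception]]) -> Type[Exception]:
    """Pick the exception type for a non-2xx response, caller overrides first."""
    if code in overrides:
        return overrides[code]
    if code in _DEFAULT_ERRORS:
        return _DEFAULT_ERRORS[code]
    if 500 <= code < 600:
        return ServerError
    return RESTError

assert exception_for_status(404, {404: NoSuchTableError}) is NoSuchTableError
assert exception_for_status(401, {}) is UnauthorizedError
assert exception_for_status(503, {}) is ServerError
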
from enum import Enum +from json import JSONDecodeError from typing import ( TYPE_CHECKING, Any, Dict, List, + Literal, Optional, Set, Tuple, + Type, Union, ) -from pydantic import Field, field_validator +from pydantic import Field, ValidationError, field_validator from requests import HTTPError, Session from tenacity import RetryCallState, retry, retry_if_exception_type, stop_after_attempt from pyiceberg import __version__ from pyiceberg.catalog import ( - BOTOCORE_SESSION, TOKEN, URI, WAREHOUSE_LOCATION, Catalog, PropertiesUpdateSummary, ) -from pyiceberg.catalog.rest.auth import AuthManager, AuthManagerAdapter, AuthManagerFactory, LegacyOAuth2AuthManager -from pyiceberg.catalog.rest.response import _handle_non_200_response from pyiceberg.exceptions import ( AuthorizationExpiredError, + BadRequestError, CommitFailedException, CommitStateUnknownException, + ForbiddenError, NamespaceAlreadyExistsError, NamespaceNotEmptyError, NoSuchIdentifierError, NoSuchNamespaceError, NoSuchTableError, NoSuchViewError, + OAuthError, + RESTError, + ServerError, + ServiceUnavailableError, TableAlreadyExistsError, UnauthorizedError, ) -from pyiceberg.io import AWS_ACCESS_KEY_ID, AWS_REGION, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec, assign_fresh_partition_spec_ids from pyiceberg.schema import Schema, assign_fresh_schema_ids from pyiceberg.table import ( @@ -64,7 +69,6 @@ StagedTable, Table, TableIdentifier, - TableProperties, ) from pyiceberg.table.metadata import TableMetadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder, assign_fresh_sort_order_ids @@ -75,7 +79,7 @@ from pyiceberg.typedef import EMPTY_DICT, UTF8, IcebergBaseModel, Identifier, Properties from pyiceberg.types import transform_dict_value_to_str from pyiceberg.utils.deprecated import deprecation_message -from pyiceberg.utils.properties import get_first_property_value, get_header_properties, property_as_bool +from pyiceberg.utils.properties import get_header_properties, property_as_bool if TYPE_CHECKING: import pyarrow as pa @@ -96,7 +100,7 @@ class Endpoints: register_table = "namespaces/{namespace}/register" load_table: str = "namespaces/{namespace}/tables/{table}" update_table: str = "namespaces/{namespace}/tables/{table}" - drop_table: str = "namespaces/{namespace}/tables/{table}" + drop_table: str = "namespaces/{namespace}/tables/{table}?purgeRequested={purge}" table_exists: str = "namespaces/{namespace}/tables/{table}" get_token: str = "oauth/tokens" rename_table: str = "tables/rename" @@ -134,9 +138,6 @@ class IdentifierKind(Enum): SIGV4_REGION = "rest.signing-region" SIGV4_SERVICE = "rest.signing-name" OAUTH2_SERVER_URI = "oauth2-server-uri" -SNAPSHOT_LOADING_MODE = "snapshot-loading-mode" -AUTH = "auth" -CUSTOM = "custom" NAMESPACE_SEPARATOR = b"\x1f".decode(UTF8) @@ -147,7 +148,7 @@ def _retry_hook(retry_state: RetryCallState) -> None: _RETRY_ARGS = { - "retry": retry_if_exception_type((AuthorizationExpiredError, UnauthorizedError)), + "retry": retry_if_exception_type(AuthorizationExpiredError), "stop": stop_after_attempt(2), "before_sleep": _retry_hook, "reraise": True, @@ -180,9 +181,18 @@ class RegisterTableRequest(IcebergBaseModel): metadata_location: str = Field(..., alias="metadata-location") +class TokenResponse(IcebergBaseModel): + access_token: str = Field() + token_type: str = Field() + expires_in: Optional[int] = Field(default=None) + issued_token_type: Optional[str] = Field(default=None) + refresh_token: Optional[str] = 
Field(default=None) + scope: Optional[str] = Field(default=None) + + class ConfigResponse(IcebergBaseModel): - defaults: Optional[Properties] = Field(default_factory=dict) - overrides: Optional[Properties] = Field(default_factory=dict) + defaults: Properties = Field() + overrides: Properties = Field() class ListNamespaceResponse(IcebergBaseModel): @@ -218,6 +228,24 @@ class ListViewsResponse(IcebergBaseModel): identifiers: List[ListViewResponseEntry] = Field() +class ErrorResponseMessage(IcebergBaseModel): + message: str = Field() + type: str = Field() + code: int = Field() + + +class ErrorResponse(IcebergBaseModel): + error: ErrorResponseMessage = Field() + + +class OAuthErrorResponse(IcebergBaseModel): + error: Literal[ + "invalid_request", "invalid_client", "invalid_grant", "unauthorized_client", "unsupported_grant_type", "invalid_scope" + ] + error_description: Optional[str] = None + error_uri: Optional[str] = None + + class RestCatalog(Catalog): uri: str _session: Session @@ -250,22 +278,7 @@ def _create_session(self) -> Session: elif ssl_client_cert := ssl_client.get(CERT): session.cert = ssl_client_cert - if auth_config := self.properties.get(AUTH): - auth_type = auth_config.get("type") - if auth_type is None: - raise ValueError("auth.type must be defined") - auth_type_config = auth_config.get(auth_type, {}) - auth_impl = auth_config.get("impl") - - if auth_type == CUSTOM and not auth_impl: - raise ValueError("auth.impl must be specified when using custom auth.type") - - if auth_type != CUSTOM and auth_impl: - raise ValueError("auth.impl can only be specified when using custom auth.type") - - session.auth = AuthManagerAdapter(AuthManagerFactory.create(auth_impl or auth_type, auth_type_config)) - else: - session.auth = AuthManagerAdapter(self._create_legacy_oauth2_auth_manager(session)) + self._refresh_token(session, self.properties.get(TOKEN)) # Set HTTP headers self._config_headers(session) @@ -276,34 +289,14 @@ def _create_session(self) -> Session: # Mount custom adapters if session_adapters := self.properties.get("session_adapters"): - for prefix, adapter in session_adapters.items(): + for prefix, adapter in session_adapters.items(): # type: ignore session.mount(prefix, adapter) # Add custom auth if session_auth := self.properties.get("session_auth"): - session.auth = session_auth + session.auth = session_auth # type: ignore return session - def _create_legacy_oauth2_auth_manager(self, session: Session) -> AuthManager: - """Create the LegacyOAuth2AuthManager by fetching required properties. 
- - This will be removed in PyIceberg 1.0 - """ - client_credentials = self.properties.get(CREDENTIAL) - # We want to call `self.auth_url` only when we are using CREDENTIAL - # with the legacy OAUTH2 flow as it will raise a DeprecationWarning - auth_url = self.auth_url if client_credentials is not None else None - - auth_config = { - "session": session, - "auth_url": auth_url, - "credential": client_credentials, - "initial_token": self.properties.get(TOKEN), - "optional_oauth_params": self._extract_optional_oauth_params(), - } - - return AuthManagerFactory.create("legacyoauth2", auth_config) - def _check_valid_namespace_identifier(self, identifier: Union[str, Identifier]) -> Identifier: """Check if the identifier has at least one element.""" identifier_tuple = Catalog.identifier_to_tuple(identifier) @@ -366,6 +359,27 @@ def _extract_optional_oauth_params(self) -> Dict[str, str]: return optional_oauth_param + def _fetch_access_token(self, session: Session, credential: str) -> str: + if SEMICOLON in credential: + client_id, client_secret = credential.split(SEMICOLON) + else: + client_id, client_secret = None, credential + + data = {GRANT_TYPE: CLIENT_CREDENTIALS, CLIENT_ID: client_id, CLIENT_SECRET: client_secret} + + optional_oauth_params = self._extract_optional_oauth_params() + data.update(optional_oauth_params) + + response = session.post( + url=self.auth_url, data=data, headers={**session.headers, "Content-type": "application/x-www-form-urlencoded"} + ) + try: + response.raise_for_status() + except HTTPError as exc: + self._handle_non_200_response(exc, {400: OAuthError, 401: OAuthError}) + + return TokenResponse(**response.json()).access_token + def _fetch_config(self) -> None: params = {} if warehouse_location := self.properties.get(WAREHOUSE_LOCATION): @@ -376,8 +390,8 @@ def _fetch_config(self) -> None: try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {}) - config_response = ConfigResponse.model_validate_json(response.text) + self._handle_non_200_response(exc, {}) + config_response = ConfigResponse(**response.json()) config = config_response.defaults config.update(self.properties) @@ -406,6 +420,58 @@ def _split_identifier_for_json(self, identifier: Union[str, Identifier]) -> Dict identifier_tuple = self._identifier_to_validated_tuple(identifier) return {"namespace": identifier_tuple[:-1], "name": identifier_tuple[-1]} + def _handle_non_200_response(self, exc: HTTPError, error_handler: Dict[int, Type[Exception]]) -> None: + exception: Type[Exception] + + if exc.response is None: + raise ValueError("Did not receive a response") + + code = exc.response.status_code + if code in error_handler: + exception = error_handler[code] + elif code == 400: + exception = BadRequestError + elif code == 401: + exception = UnauthorizedError + elif code == 403: + exception = ForbiddenError + elif code == 422: + exception = RESTError + elif code == 419: + exception = AuthorizationExpiredError + elif code == 501: + exception = NotImplementedError + elif code == 503: + exception = ServiceUnavailableError + elif 500 <= code < 600: + exception = ServerError + else: + exception = RESTError + + try: + if exception == OAuthError: + # The OAuthErrorResponse has a different format + error = OAuthErrorResponse(**exc.response.json()) + response = str(error.error) + if description := error.error_description: + response += f": {description}" + if uri := error.error_uri: + response += f" ({uri})" + else: + error = ErrorResponse(**exc.response.json()).error + response = 
f"{error.type}: {error.message}" + except JSONDecodeError: + # In the case we don't have a proper response + response = f"RESTError {exc.response.status_code}: Could not decode json payload: {exc.response.text}" + except ValidationError as e: + # In the case we don't have a proper response + errs = ", ".join(err["msg"] for err in e.errors()) + response = ( + f"RESTError {exc.response.status_code}: Received unexpected JSON Payload: {exc.response.text}, errors: {errs}" + ) + + raise exception(response) from exc + def _init_sigv4(self, session: Session) -> None: from urllib import parse @@ -419,17 +485,11 @@ class SigV4Adapter(HTTPAdapter): def __init__(self, **properties: str): super().__init__() self._properties = properties - self._boto_session = boto3.Session( - region_name=get_first_property_value(self._properties, AWS_REGION), - botocore_session=self._properties.get(BOTOCORE_SESSION), - aws_access_key_id=get_first_property_value(self._properties, AWS_ACCESS_KEY_ID), - aws_secret_access_key=get_first_property_value(self._properties, AWS_SECRET_ACCESS_KEY), - aws_session_token=get_first_property_value(self._properties, AWS_SESSION_TOKEN), - ) def add_headers(self, request: PreparedRequest, **kwargs: Any) -> None: # pylint: disable=W0613 - credentials = self._boto_session.get_credentials().get_frozen_credentials() - region = self._properties.get(SIGV4_REGION, self._boto_session.region_name) + boto_session = boto3.Session() + credentials = boto_session.get_credentials().get_frozen_credentials() + region = self._properties.get(SIGV4_REGION, boto_session.region_name) service = self._properties.get(SIGV4_SERVICE, "execute-api") url = str(request.url).split("?")[0] @@ -481,13 +541,16 @@ def _response_to_staged_table(self, identifier_tuple: Tuple[str, ...], table_res catalog=self, ) - def _refresh_token(self) -> None: - # Reactive token refresh is atypical - we should proactively refresh tokens in a separate thread - # instead of retrying on Auth Exceptions. 
Keeping refresh behavior for the LegacyOAuth2AuthManager - # for backward compatibility - auth_manager = self._session.auth.auth_manager # type: ignore[union-attr] - if isinstance(auth_manager, LegacyOAuth2AuthManager): - auth_manager._refresh_token() + def _refresh_token(self, session: Optional[Session] = None, initial_token: Optional[str] = None) -> None: + session = session or self._session + if initial_token is not None: + self.properties[TOKEN] = initial_token + elif CREDENTIAL in self.properties: + self.properties[TOKEN] = self._fetch_access_token(session, self.properties[CREDENTIAL]) + + # Set Auth token for subsequent calls in the session + if token := self.properties.get(TOKEN): + session.headers[AUTHORIZATION_HEADER] = f"{BEARER_PREFIX} {token}" def _config_headers(self, session: Session) -> None: header_properties = get_header_properties(self.properties) @@ -507,10 +570,7 @@ def _create_table( properties: Properties = EMPTY_DICT, stage_create: bool = False, ) -> TableResponse: - iceberg_schema = self._convert_schema_if_needed( - schema, - int(properties.get(TableProperties.FORMAT_VERSION, TableProperties.DEFAULT_FORMAT_VERSION)), # type: ignore - ) + iceberg_schema = self._convert_schema_if_needed(schema) fresh_schema = assign_fresh_schema_ids(iceberg_schema) fresh_partition_spec = assign_fresh_partition_spec_ids(partition_spec, iceberg_schema, fresh_schema) fresh_sort_order = assign_fresh_sort_order_ids(sort_order, iceberg_schema, fresh_schema) @@ -535,8 +595,8 @@ def _create_table( try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {409: TableAlreadyExistsError, 404: NoSuchNamespaceError}) - return TableResponse.model_validate_json(response.text) + self._handle_non_200_response(exc, {409: TableAlreadyExistsError}) + return TableResponse(**response.json()) @retry(**_RETRY_ARGS) def create_table( @@ -608,9 +668,9 @@ def register_table(self, identifier: Union[str, Identifier], metadata_location: try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {409: TableAlreadyExistsError}) + self._handle_non_200_response(exc, {409: TableAlreadyExistsError}) - table_response = TableResponse.model_validate_json(response.text) + table_response = TableResponse(**response.json()) return self._response_to_table(self.identifier_to_tuple(identifier), table_response) @retry(**_RETRY_ARGS) @@ -621,39 +681,29 @@ def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {404: NoSuchNamespaceError}) - return [(*table.namespace, table.name) for table in ListTablesResponse.model_validate_json(response.text).identifiers] + self._handle_non_200_response(exc, {404: NoSuchNamespaceError}) + return [(*table.namespace, table.name) for table in ListTablesResponse(**response.json()).identifiers] @retry(**_RETRY_ARGS) def load_table(self, identifier: Union[str, Identifier]) -> Table: - params = {} - if mode := self.properties.get(SNAPSHOT_LOADING_MODE): - if mode in {"all", "refs"}: - params["snapshots"] = mode - else: - raise ValueError("Invalid snapshot-loading-mode: {}") - - response = self._session.get( - self.url(Endpoints.load_table, prefixed=True, **self._split_identifier_for_path(identifier)), params=params - ) + response = self._session.get(self.url(Endpoints.load_table, prefixed=True, **self._split_identifier_for_path(identifier))) try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, 
{404: NoSuchTableError}) + self._handle_non_200_response(exc, {404: NoSuchTableError}) - table_response = TableResponse.model_validate_json(response.text) + table_response = TableResponse(**response.json()) return self._response_to_table(self.identifier_to_tuple(identifier), table_response) @retry(**_RETRY_ARGS) def drop_table(self, identifier: Union[str, Identifier], purge_requested: bool = False) -> None: response = self._session.delete( - self.url(Endpoints.drop_table, prefixed=True, **self._split_identifier_for_path(identifier)), - params={"purgeRequested": purge_requested}, + self.url(Endpoints.drop_table, prefixed=True, purge=purge_requested, **self._split_identifier_for_path(identifier)), ) try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {404: NoSuchTableError}) + self._handle_non_200_response(exc, {404: NoSuchTableError}) @retry(**_RETRY_ARGS) def purge_table(self, identifier: Union[str, Identifier]) -> None: @@ -669,7 +719,7 @@ def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: U try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {404: NoSuchTableError, 409: TableAlreadyExistsError}) + self._handle_non_200_response(exc, {404: NoSuchTableError, 409: TableAlreadyExistsError}) return self.load_table(to_identifier) @@ -692,8 +742,8 @@ def list_views(self, namespace: Union[str, Identifier]) -> List[Identifier]: try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {404: NoSuchNamespaceError}) - return [(*view.namespace, view.name) for view in ListViewsResponse.model_validate_json(response.text).identifiers] + self._handle_non_200_response(exc, {404: NoSuchNamespaceError}) + return [(*view.namespace, view.name) for view in ListViewsResponse(**response.json()).identifiers] @retry(**_RETRY_ARGS) def commit_table( @@ -730,7 +780,7 @@ def commit_table( try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response( + self._handle_non_200_response( exc, { 409: CommitFailedException, @@ -739,7 +789,7 @@ def commit_table( 504: CommitStateUnknownException, }, ) - return CommitTableResponse.model_validate_json(response.text) + return CommitTableResponse(**response.json()) @retry(**_RETRY_ARGS) def create_namespace(self, namespace: Union[str, Identifier], properties: Properties = EMPTY_DICT) -> None: @@ -749,7 +799,7 @@ def create_namespace(self, namespace: Union[str, Identifier], properties: Proper try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {409: NamespaceAlreadyExistsError}) + self._handle_non_200_response(exc, {409: NamespaceAlreadyExistsError}) @retry(**_RETRY_ARGS) def drop_namespace(self, namespace: Union[str, Identifier]) -> None: @@ -759,7 +809,7 @@ def drop_namespace(self, namespace: Union[str, Identifier]) -> None: try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {404: NoSuchNamespaceError, 409: NamespaceNotEmptyError}) + self._handle_non_200_response(exc, {404: NoSuchNamespaceError, 409: NamespaceNotEmptyError}) @retry(**_RETRY_ARGS) def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identifier]: @@ -774,9 +824,9 @@ def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identi try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {404: NoSuchNamespaceError}) + self._handle_non_200_response(exc, {}) - return 
ListNamespaceResponse.model_validate_json(response.text).namespaces + return ListNamespaceResponse(**response.json()).namespaces @retry(**_RETRY_ARGS) def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Properties: @@ -786,9 +836,9 @@ def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Proper try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {404: NoSuchNamespaceError}) + self._handle_non_200_response(exc, {404: NoSuchNamespaceError}) - return NamespaceResponse.model_validate_json(response.text).properties + return NamespaceResponse(**response.json()).properties @retry(**_RETRY_ARGS) def update_namespace_properties( @@ -801,8 +851,8 @@ def update_namespace_properties( try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {404: NoSuchNamespaceError}) - parsed_response = UpdateNamespacePropertiesResponse.model_validate_json(response.text) + self._handle_non_200_response(exc, {404: NoSuchNamespaceError}) + parsed_response = UpdateNamespacePropertiesResponse(**response.json()) return PropertiesUpdateSummary( removed=parsed_response.removed, updated=parsed_response.updated, @@ -823,7 +873,7 @@ def namespace_exists(self, namespace: Union[str, Identifier]) -> bool: try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {}) + self._handle_non_200_response(exc, {}) return False @@ -849,7 +899,7 @@ def table_exists(self, identifier: Union[str, Identifier]) -> bool: try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {}) + self._handle_non_200_response(exc, {}) return False @@ -874,7 +924,7 @@ def view_exists(self, identifier: Union[str, Identifier]) -> bool: try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {}) + self._handle_non_200_response(exc, {}) return False @@ -886,4 +936,4 @@ def drop_view(self, identifier: Union[str]) -> None: try: response.raise_for_status() except HTTPError as exc: - _handle_non_200_response(exc, {404: NoSuchViewError}) + self._handle_non_200_response(exc, {404: NoSuchViewError}) diff --git a/pyiceberg/catalog/rest/auth.py b/pyiceberg/catalog/rest/auth.py deleted file mode 100644 index ab547d8d55..0000000000 --- a/pyiceberg/catalog/rest/auth.py +++ /dev/null @@ -1,231 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import base64 -import importlib -import logging -from abc import ABC, abstractmethod -from typing import Any, Dict, List, Optional, Type - -from requests import HTTPError, PreparedRequest, Session -from requests.auth import AuthBase - -from pyiceberg.catalog.rest.response import TokenResponse, _handle_non_200_response -from pyiceberg.exceptions import OAuthError - -COLON = ":" -logger = logging.getLogger(__name__) - - -class AuthManager(ABC): - """ - Abstract base class for Authentication Managers used to supply authorization headers to HTTP clients (e.g. requests.Session). - - Subclasses must implement the `auth_header` method to return an Authorization header value. - """ - - @abstractmethod - def auth_header(self) -> Optional[str]: - """Return the Authorization header value, or None if not applicable.""" - - -class NoopAuthManager(AuthManager): - """Auth Manager implementation with no auth.""" - - def auth_header(self) -> Optional[str]: - return None - - -class BasicAuthManager(AuthManager): - """AuthManager implementation that supports basic password auth.""" - - def __init__(self, username: str, password: str): - credentials = f"{username}:{password}" - self._token = base64.b64encode(credentials.encode()).decode() - - def auth_header(self) -> str: - return f"Basic {self._token}" - - -class LegacyOAuth2AuthManager(AuthManager): - """Legacy OAuth2 AuthManager implementation. - - This class exists for backward compatibility, and will be removed in - PyIceberg 1.0.0 in favor of OAuth2AuthManager. - """ - - _session: Session - _auth_url: Optional[str] - _token: Optional[str] - _credential: Optional[str] - _optional_oauth_params: Optional[Dict[str, str]] - - def __init__( - self, - session: Session, - auth_url: Optional[str] = None, - credential: Optional[str] = None, - initial_token: Optional[str] = None, - optional_oauth_params: Optional[Dict[str, str]] = None, - ): - self._session = session - self._auth_url = auth_url - self._token = initial_token - self._credential = credential - self._optional_oauth_params = optional_oauth_params - self._refresh_token() - - def _fetch_access_token(self, credential: str) -> str: - if COLON in credential: - client_id, client_secret = credential.split(COLON) - else: - client_id, client_secret = None, credential - - data = {"grant_type": "client_credentials", "client_id": client_id, "client_secret": client_secret} - - if self._optional_oauth_params: - data.update(self._optional_oauth_params) - - if self._auth_url is None: - raise ValueError("Cannot fetch access token from undefined auth_url") - - response = self._session.post( - url=self._auth_url, data=data, headers={**self._session.headers, "Content-type": "application/x-www-form-urlencoded"} - ) - try: - response.raise_for_status() - except HTTPError as exc: - _handle_non_200_response(exc, {400: OAuthError, 401: OAuthError}) - - return TokenResponse.model_validate_json(response.text).access_token - - def _refresh_token(self) -> None: - if self._credential is not None: - self._token = self._fetch_access_token(self._credential) - - def auth_header(self) -> str: - return f"Bearer {self._token}" - - -class GoogleAuthManager(AuthManager): - """An auth manager that is responsible for handling Google credentials.""" - - def __init__(self, credentials_path: Optional[str] = None, scopes: Optional[List[str]] = None): - """ - Initialize GoogleAuthManager. - - Args: - credentials_path: Optional path to Google credentials JSON file. - scopes: Optional list of OAuth2 scopes. 
- """ - try: - import google.auth - import google.auth.transport.requests - except ImportError as e: - raise ImportError("Google Auth libraries not found. Please install 'google-auth'.") from e - - if credentials_path: - self.credentials, _ = google.auth.load_credentials_from_file(credentials_path, scopes=scopes) - else: - logger.info("Using Google Default Application Credentials") - self.credentials, _ = google.auth.default(scopes=scopes) - self._auth_request = google.auth.transport.requests.Request() - - def auth_header(self) -> str: - self.credentials.refresh(self._auth_request) - return f"Bearer {self.credentials.token}" - - -class AuthManagerAdapter(AuthBase): - """A `requests.auth.AuthBase` adapter that integrates an `AuthManager` into a `requests.Session` to automatically attach the appropriate Authorization header to every request. - - This adapter is useful when working with `requests.Session.auth` - and allows reuse of authentication strategies defined by `AuthManager`. - This AuthManagerAdapter is only intended to be used against the REST Catalog - Server that expects the Authorization Header. - """ - - def __init__(self, auth_manager: AuthManager): - """ - Initialize AuthManagerAdapter. - - Args: - auth_manager (AuthManager): An instance of an AuthManager subclass. - """ - self.auth_manager = auth_manager - - def __call__(self, request: PreparedRequest) -> PreparedRequest: - """ - Modify the outgoing request to include the Authorization header. - - Args: - request (requests.PreparedRequest): The HTTP request being prepared. - - Returns: - requests.PreparedRequest: The modified request with Authorization header. - """ - if auth_header := self.auth_manager.auth_header(): - request.headers["Authorization"] = auth_header - return request - - -class AuthManagerFactory: - _registry: Dict[str, Type["AuthManager"]] = {} - - @classmethod - def register(cls, name: str, auth_manager_class: Type["AuthManager"]) -> None: - """ - Register a string name to a known AuthManager class. - - Args: - name (str): unique name like 'oauth2' to register the AuthManager with - auth_manager_class (Type["AuthManager"]): Implementation of AuthManager - - Returns: - None - """ - cls._registry[name] = auth_manager_class - - @classmethod - def create(cls, class_or_name: str, config: Dict[str, Any]) -> AuthManager: - """ - Create an AuthManager by name or fully-qualified class path. 
- - Args: - class_or_name (str): Either a name like 'oauth2' or a full class path like 'my.module.CustomAuthManager' - config (Dict[str, Any]): Configuration passed to the AuthManager constructor - - Returns: - AuthManager: An instantiated AuthManager subclass - """ - if class_or_name in cls._registry: - manager_cls = cls._registry[class_or_name] - else: - try: - module_path, class_name = class_or_name.rsplit(".", 1) - module = importlib.import_module(module_path) - manager_cls = getattr(module, class_name) - except Exception as err: - raise ValueError(f"Could not load AuthManager class for '{class_or_name}'") from err - - return manager_cls(**config) - - -AuthManagerFactory.register("noop", NoopAuthManager) -AuthManagerFactory.register("basic", BasicAuthManager) -AuthManagerFactory.register("legacyoauth2", LegacyOAuth2AuthManager) -AuthManagerFactory.register("google", GoogleAuthManager) diff --git a/pyiceberg/catalog/rest/response.py b/pyiceberg/catalog/rest/response.py deleted file mode 100644 index 8f23af8c35..0000000000 --- a/pyiceberg/catalog/rest/response.py +++ /dev/null @@ -1,111 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
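For reference, the `AuthManagerFactory` registry deleted above resolved auth managers either by a registered short name or by a dotted class path. A minimal sketch of how it was used on a 0.9.x checkout (the `CustomAuthManager` class and `my_pkg.auth` path are hypothetical):

```python
from typing import Optional

# Imports refer to the module removed above; they only resolve on a 0.9.x
# checkout where pyiceberg/catalog/rest/auth.py still exists.
from pyiceberg.catalog.rest.auth import AuthManager, AuthManagerFactory


class CustomAuthManager(AuthManager):
    """Hypothetical manager that always sends a static bearer token."""

    def __init__(self, token: str):
        self._token = token

    def auth_header(self) -> Optional[str]:
        return f"Bearer {self._token}"


# Option 1: register a short name, then create by name.
AuthManagerFactory.register("custom", CustomAuthManager)
manager = AuthManagerFactory.create("custom", {"token": "t-123"})

# Option 2: create directly from a fully-qualified class path (hypothetical module).
# manager = AuthManagerFactory.create("my_pkg.auth.CustomAuthManager", {"token": "t-123"})

assert manager.auth_header() == "Bearer t-123"
```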
-from json import JSONDecodeError -from typing import Dict, Literal, Optional, Type - -from pydantic import Field, ValidationError -from requests import HTTPError - -from pyiceberg.exceptions import ( - AuthorizationExpiredError, - BadRequestError, - ForbiddenError, - OAuthError, - RESTError, - ServerError, - ServiceUnavailableError, - UnauthorizedError, -) -from pyiceberg.typedef import IcebergBaseModel - - -class TokenResponse(IcebergBaseModel): - access_token: str = Field() - token_type: str = Field() - expires_in: Optional[int] = Field(default=None) - issued_token_type: Optional[str] = Field(default=None) - refresh_token: Optional[str] = Field(default=None) - scope: Optional[str] = Field(default=None) - - -class ErrorResponseMessage(IcebergBaseModel): - message: str = Field() - type: str = Field() - code: int = Field() - - -class ErrorResponse(IcebergBaseModel): - error: ErrorResponseMessage = Field() - - -class OAuthErrorResponse(IcebergBaseModel): - error: Literal[ - "invalid_request", "invalid_client", "invalid_grant", "unauthorized_client", "unsupported_grant_type", "invalid_scope" - ] - error_description: Optional[str] = None - error_uri: Optional[str] = None - - -def _handle_non_200_response(exc: HTTPError, error_handler: Dict[int, Type[Exception]]) -> None: - exception: Type[Exception] - - if exc.response is None: - raise ValueError("Did not receive a response") - - code = exc.response.status_code - if code in error_handler: - exception = error_handler[code] - elif code == 400: - exception = BadRequestError - elif code == 401: - exception = UnauthorizedError - elif code == 403: - exception = ForbiddenError - elif code == 422: - exception = RESTError - elif code == 419: - exception = AuthorizationExpiredError - elif code == 501: - exception = NotImplementedError - elif code == 503: - exception = ServiceUnavailableError - elif 500 <= code < 600: - exception = ServerError - else: - exception = RESTError - - try: - if exception == OAuthError: - # The OAuthErrorResponse has a different format - error = OAuthErrorResponse.model_validate_json(exc.response.text) - response = str(error.error) - if description := error.error_description: - response += f": {description}" - if uri := error.error_uri: - response += f" ({uri})" - else: - error = ErrorResponse.model_validate_json(exc.response.text).error - response = f"{error.type}: {error.message}" - except JSONDecodeError: - # In the case we don't have a proper response - response = f"RESTError {exc.response.status_code}: Could not decode json payload: {exc.response.text}" - except ValidationError as e: - # In the case we don't have a proper response - errs = ", ".join(err["msg"] for err in e.errors()) - response = f"RESTError {exc.response.status_code}: Received unexpected JSON Payload: {exc.response.text}, errors: {errs}" - - raise exception(response) from exc diff --git a/pyiceberg/catalog/sql.py b/pyiceberg/catalog/sql.py index 880a4db481..e656fbed64 100644 --- a/pyiceberg/catalog/sql.py +++ b/pyiceberg/catalog/sql.py @@ -44,7 +44,6 @@ from pyiceberg.catalog import ( METADATA_LOCATION, - URI, Catalog, MetastoreCatalog, PropertiesUpdateSummary, @@ -120,7 +119,7 @@ class SqlCatalog(MetastoreCatalog): def __init__(self, name: str, **properties: str): super().__init__(name, **properties) - if not (uri_prop := self.properties.get(URI)): + if not (uri_prop := self.properties.get("uri")): raise NoSuchPropertyException("SQL connection URI is required") echo_str = str(self.properties.get("echo", DEFAULT_ECHO_VALUE)).lower() @@ -620,28 +619,15 @@ 
def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identi table_stmt = select(IcebergTables.table_namespace).where(IcebergTables.catalog_name == self.name) namespace_stmt = select(IcebergNamespaceProperties.namespace).where(IcebergNamespaceProperties.catalog_name == self.name) if namespace: - namespace_like = Catalog.namespace_to_string(namespace, NoSuchNamespaceError) + "%" - table_stmt = table_stmt.where(IcebergTables.table_namespace.like(namespace_like)) - namespace_stmt = namespace_stmt.where(IcebergNamespaceProperties.namespace.like(namespace_like)) + namespace_str = Catalog.namespace_to_string(namespace, NoSuchNamespaceError) + table_stmt = table_stmt.where(IcebergTables.table_namespace.like(namespace_str)) + namespace_stmt = namespace_stmt.where(IcebergNamespaceProperties.namespace.like(namespace_str)) stmt = union( table_stmt, namespace_stmt, ) with Session(self.engine) as session: - namespace_tuple = Catalog.identifier_to_tuple(namespace) - sub_namespaces_level_length = len(namespace_tuple) + 1 - - namespaces = list( - { # only get distinct namespaces - ns[:sub_namespaces_level_length] # truncate to the required level - for ns in {Catalog.identifier_to_tuple(ns) for ns in session.execute(stmt).scalars()} - if len(ns) >= sub_namespaces_level_length # only get sub namespaces/children - and ns[: sub_namespaces_level_length - 1] == namespace_tuple - # exclude fuzzy matches when `namespace` contains `%` or `_` - } - ) - - return namespaces + return [Catalog.identifier_to_tuple(namespace_col) for namespace_col in session.execute(stmt).scalars()] def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Properties: """Get properties for a namespace. diff --git a/pyiceberg/cli/console.py b/pyiceberg/cli/console.py index d918f87918..83e67a3cbb 100644 --- a/pyiceberg/cli/console.py +++ b/pyiceberg/cli/console.py @@ -29,11 +29,11 @@ from click import Context from pyiceberg import __version__ -from pyiceberg.catalog import URI, Catalog, load_catalog +from pyiceberg.catalog import Catalog, load_catalog from pyiceberg.cli.output import ConsoleOutput, JsonOutput, Output from pyiceberg.exceptions import NoSuchNamespaceError, NoSuchPropertyException, NoSuchTableError from pyiceberg.table import TableProperties -from pyiceberg.table.refs import SnapshotRef, SnapshotRefType +from pyiceberg.table.refs import SnapshotRef from pyiceberg.utils.properties import property_as_int @@ -75,7 +75,7 @@ def run( if ugi: properties["ugi"] = ugi if uri: - properties[URI] = uri + properties["uri"] = uri if credential: properties["credential"] = credential @@ -300,6 +300,7 @@ def get_namespace(ctx: Context, identifier: str, property_name: str) -> None: identifier_tuple = Catalog.identifier_to_tuple(identifier) namespace_properties = catalog.load_namespace_properties(identifier_tuple) + assert namespace_properties if property_name: if property_value := namespace_properties.get(property_name): @@ -321,6 +322,7 @@ def get_table(ctx: Context, identifier: str, property_name: str) -> None: identifier_tuple = Catalog.identifier_to_tuple(identifier) metadata = catalog.load_table(identifier_tuple).metadata + assert metadata if property_name: if property_value := metadata.properties.get(property_name): @@ -361,10 +363,9 @@ def table(ctx: Context, identifier: str, property_name: str, property_value: str catalog, output = _catalog_and_output(ctx) identifier_tuple = Catalog.identifier_to_tuple(identifier) - table = catalog.load_table(identifier_tuple) - with table.transaction() as tx: - 
tx.set_properties({property_name: property_value}) - output.text(f"Set {property_name}={property_value} on {identifier}") + _ = catalog.load_table(identifier_tuple) + output.text(f"Setting {property_name}={property_value} on {identifier}") + raise NotImplementedError("Writing is WIP") @properties.group() @@ -399,9 +400,8 @@ def table(ctx: Context, identifier: str, property_name: str) -> None: # noqa: F catalog, output = _catalog_and_output(ctx) table = catalog.load_table(identifier) if property_name in table.metadata.properties: - with table.transaction() as tx: - tx.remove_properties(property_name) - output.text(f"Property {property_name} removed from {identifier}") + output.exception(NotImplementedError("Writing is WIP")) + ctx.exit(1) else: raise NoSuchPropertyException(f"Property {property_name} does not exist on {identifier}") @@ -419,7 +419,7 @@ def list_refs(ctx: Context, identifier: str, type: str, verbose: bool) -> None: refs = table.refs() if type: type = type.lower() - if type not in {SnapshotRefType.BRANCH, SnapshotRefType.TAG}: + if type not in {"branch", "tag"}: raise ValueError(f"Type must be either branch or tag, got: {type}") relevant_refs = [ @@ -433,7 +433,7 @@ def list_refs(ctx: Context, identifier: str, type: str, verbose: bool) -> None: def _retention_properties(ref: SnapshotRef, table_properties: Dict[str, str]) -> Dict[str, str]: retention_properties = {} - if ref.snapshot_ref_type == SnapshotRefType.BRANCH: + if ref.snapshot_ref_type == "branch": default_min_snapshots_to_keep = property_as_int( table_properties, TableProperties.MIN_SNAPSHOTS_TO_KEEP, diff --git a/pyiceberg/conversions.py b/pyiceberg/conversions.py index 7bf7b462e2..de67cdfff0 100644 --- a/pyiceberg/conversions.py +++ b/pyiceberg/conversions.py @@ -20,7 +20,6 @@ - Converting partition strings to built-in python objects. - Converting a value to a byte buffer. - Converting a byte buffer to a value. - - Converting a json-single field serialized field Note: Conversion logic varies based on the PrimitiveType implementation. Therefore conversion functions @@ -29,7 +28,6 @@ implementations that share the same conversion logic, registrations can be stacked. """ -import codecs import uuid from datetime import date, datetime, time from decimal import Decimal @@ -55,33 +53,13 @@ LongType, PrimitiveType, StringType, - TimestampNanoType, TimestampType, - TimestamptzNanoType, TimestamptzType, TimeType, - UnknownType, UUIDType, strtobool, ) -from pyiceberg.utils.datetime import ( - date_str_to_days, - date_to_days, - datetime_to_micros, - datetime_to_nanos, - days_to_date, - micros_to_time, - micros_to_timestamp, - micros_to_timestamptz, - time_str_to_micros, - time_to_micros, - timestamp_to_micros, - timestamptz_to_micros, - to_human_day, - to_human_time, - to_human_timestamp, - to_human_timestamptz, -) +from pyiceberg.utils.datetime import date_to_days, datetime_to_micros, time_to_micros from pyiceberg.utils.decimal import decimal_to_bytes, unscaled_to_decimal _BOOL_STRUCT = Struct(" Union[int, float, str, uui @partition_to_py.register(DateType) @partition_to_py.register(TimeType) @partition_to_py.register(TimestampType) -@partition_to_py.register(TimestampNanoType) @partition_to_py.register(TimestamptzType) -@partition_to_py.register(TimestamptzNanoType) @handle_none def _(primitive_type: PrimitiveType, value_str: str) -> int: """Convert a string to an integer value. 
@@ -178,12 +154,6 @@ def _(_: DecimalType, value_str: str) -> Decimal: return Decimal(value_str) -@partition_to_py.register(UnknownType) -@handle_none -def _(type_: UnknownType, _: str) -> None: - return None - - @singledispatch def to_bytes( primitive_type: PrimitiveType, _: Union[bool, bytes, Decimal, date, datetime, float, int, str, time, uuid.UUID] @@ -218,20 +188,12 @@ def _(_: PrimitiveType, value: int) -> bytes: @to_bytes.register(TimestampType) @to_bytes.register(TimestamptzType) -def _(_: PrimitiveType, value: Union[datetime, int]) -> bytes: +def _(_: TimestampType, value: Union[datetime, int]) -> bytes: if isinstance(value, datetime): value = datetime_to_micros(value) return _LONG_STRUCT.pack(value) -@to_bytes.register(TimestampNanoType) -@to_bytes.register(TimestamptzNanoType) -def _(_: PrimitiveType, value: Union[datetime, int]) -> bytes: - if isinstance(value, datetime): - value = datetime_to_nanos(value) - return _LONG_STRUCT.pack(value) - - @to_bytes.register(DateType) def _(_: DateType, value: Union[date, int]) -> bytes: if isinstance(value, date): @@ -314,7 +276,7 @@ def from_bytes(primitive_type: PrimitiveType, b: bytes) -> L: # type: ignore primitive_type (PrimitiveType): An implementation of the PrimitiveType base class. b (bytes): The bytes to convert. """ - raise TypeError(f"Cannot deserialize bytes, type {primitive_type} not supported: {b!r}") + raise TypeError(f"Cannot deserialize bytes, type {primitive_type} not supported: {str(b)}") @from_bytes.register(BooleanType) @@ -332,8 +294,6 @@ def _(_: PrimitiveType, b: bytes) -> int: @from_bytes.register(TimeType) @from_bytes.register(TimestampType) @from_bytes.register(TimestamptzType) -@from_bytes.register(TimestampNanoType) -@from_bytes.register(TimestamptzNanoType) def _(_: PrimitiveType, b: bytes) -> int: return _LONG_STRUCT.unpack(b)[0] @@ -364,234 +324,3 @@ def _(_: PrimitiveType, b: bytes) -> bytes: def _(primitive_type: DecimalType, buf: bytes) -> Decimal: unscaled = int.from_bytes(buf, "big", signed=True) return unscaled_to_decimal(unscaled, primitive_type.scale) - - -@from_bytes.register(UnknownType) -def _(type_: UnknownType, buf: bytes) -> None: - return None - - -@singledispatch # type: ignore -def to_json(primitive_type: PrimitiveType, val: Any) -> L: # type: ignore - """Convert built-in python values into JSON value types. - - https://iceberg.apache.org/spec/#json-single-value-serialization - - Args: - primitive_type (PrimitiveType): An implementation of the PrimitiveType base class. 
- val (Any): The arbitrary built-in value to convert into the right form - """ - raise TypeError(f"Cannot deserialize bytes, type {primitive_type} not supported: {val}") - - -@to_json.register(BooleanType) -def _(_: BooleanType, val: bool) -> bool: - """Python bool automatically converts into a JSON bool.""" - return val - - -@to_json.register(IntegerType) -@to_json.register(LongType) -def _(_: Union[IntegerType, LongType], val: int) -> int: - """Python int automatically converts to a JSON int.""" - return val - - -@to_json.register(DateType) -def _(_: DateType, val: Union[date, int]) -> str: - """JSON date is string encoded.""" - if isinstance(val, date): - val = date_to_days(val) - return to_human_day(val) - - -@to_json.register(TimeType) -def _(_: TimeType, val: Union[int, time]) -> str: - """Python time or microseconds since epoch serializes into an ISO8601 time.""" - if isinstance(val, time): - val = time_to_micros(val) - return to_human_time(val) - - -@to_json.register(TimestampType) -def _(_: PrimitiveType, val: Union[int, datetime]) -> str: - """Python datetime (without timezone) or microseconds since epoch serializes into an ISO8601 timestamp.""" - if isinstance(val, datetime): - val = datetime_to_micros(val) - - return to_human_timestamp(val) - - -@to_json.register(TimestamptzType) -def _(_: TimestamptzType, val: Union[int, datetime]) -> str: - """Python datetime (with timezone) or microseconds since epoch serializes into an ISO8601 timestamp.""" - if isinstance(val, datetime): - val = datetime_to_micros(val) - return to_human_timestamptz(val) - - -@to_json.register(FloatType) -@to_json.register(DoubleType) -def _(_: Union[FloatType, DoubleType], val: float) -> float: - """Float serializes into JSON float.""" - return val - - -@to_json.register(StringType) -def _(_: StringType, val: str) -> str: - """Python string serializes into JSON string.""" - return val - - -@to_json.register(FixedType) -def _(t: FixedType, b: bytes) -> str: - """Python bytes serializes into hexadecimal encoded string.""" - if len(t) != len(b): - raise ValueError(f"FixedType has length {len(t)}, which is different from the value: {len(b)}") - - return codecs.encode(b, "hex").decode(UTF8) - - -@to_json.register(BinaryType) -def _(_: BinaryType, b: bytes) -> str: - """Python bytes serializes into hexadecimal encoded string.""" - return codecs.encode(b, "hex").decode(UTF8) - - -@to_json.register(DecimalType) -def _(_: DecimalType, val: Decimal) -> str: - """Python decimal serializes into string. - - Stores the string representation of the decimal value, specifically, for - values with a positive scale, the number of digits to the right of the - decimal point is used to indicate scale, for values with a negative scale, - the scientific notation is used and the exponent must equal the negated scale. - """ - return str(val) - - -@to_json.register(UUIDType) -def _(_: UUIDType, val: uuid.UUID) -> str: - """Serialize into a JSON string.""" - return str(val) - - -@singledispatch # type: ignore -def from_json(primitive_type: PrimitiveType, val: Any) -> L: # type: ignore - """Convert JSON value types into built-in python values. - - https://iceberg.apache.org/spec/#json-single-value-serialization - - Args: - primitive_type (PrimitiveType): An implementation of the PrimitiveType base class. 
- val (Any): The arbitrary JSON value to convert into the right form - """ - raise TypeError(f"Cannot deserialize bytes, type {primitive_type} not supported: {str(val)}") - - -@from_json.register(BooleanType) -def _(_: BooleanType, val: bool) -> bool: - """JSON bool automatically converts into a Python bool.""" - return val - - -@from_json.register(IntegerType) -@from_json.register(LongType) -def _(_: Union[IntegerType, LongType], val: int) -> int: - """JSON int automatically converts to a Python int.""" - return val - - -@from_json.register(DateType) -def _(_: DateType, val: Union[str, int, date]) -> date: - """JSON date is string encoded.""" - if isinstance(val, str): - val = date_str_to_days(val) - if isinstance(val, int): - return days_to_date(val) - else: - return val - - -@from_json.register(TimeType) -def _(_: TimeType, val: Union[str, int, time]) -> time: - """JSON ISO8601 string into Python time.""" - if isinstance(val, str): - val = time_str_to_micros(val) - if isinstance(val, int): - return micros_to_time(val) - else: - return val - - -@from_json.register(TimestampType) -def _(_: PrimitiveType, val: Union[str, int, datetime]) -> datetime: - """JSON ISO8601 string into Python datetime.""" - if isinstance(val, str): - val = timestamp_to_micros(val) - if isinstance(val, int): - return micros_to_timestamp(val) - else: - return val - - -@from_json.register(TimestamptzType) -def _(_: TimestamptzType, val: Union[str, int, datetime]) -> datetime: - """JSON ISO8601 string into Python datetime.""" - if isinstance(val, str): - val = timestamptz_to_micros(val) - if isinstance(val, int): - return micros_to_timestamptz(val) - else: - return val - - -@from_json.register(FloatType) -@from_json.register(DoubleType) -def _(_: Union[FloatType, DoubleType], val: float) -> float: - """JSON float deserializes into a Python float.""" - return val - - -@from_json.register(StringType) -def _(_: StringType, val: str) -> str: - """JSON string serializes into a Python string.""" - return val - - -@from_json.register(FixedType) -def _(t: FixedType, val: Union[str, bytes]) -> bytes: - """JSON hexadecimal encoded string into bytes.""" - if isinstance(val, str): - val = codecs.decode(val.encode(UTF8), "hex") - - if len(t) != len(val): - raise ValueError(f"FixedType has length {len(t)}, which is different from the value: {len(val)}") - - return val - - -@from_json.register(BinaryType) -def _(_: BinaryType, val: Union[bytes, str]) -> bytes: - """JSON hexadecimal encoded string into bytes.""" - if isinstance(val, str): - return codecs.decode(val.encode(UTF8), "hex") - else: - return val - - -@from_json.register(DecimalType) -def _(_: DecimalType, val: str) -> Decimal: - """Convert JSON string into a Python Decimal.""" - return Decimal(val) - - -@from_json.register(UUIDType) -def _(_: UUIDType, val: Union[str, bytes, uuid.UUID]) -> uuid.UUID: - """Convert JSON string into Python UUID.""" - if isinstance(val, str): - return uuid.UUID(val) - elif isinstance(val, bytes): - return uuid.UUID(bytes=val) - else: - return val diff --git a/pyiceberg/exceptions.py b/pyiceberg/exceptions.py index c80f104e46..56574ff471 100644 --- a/pyiceberg/exceptions.py +++ b/pyiceberg/exceptions.py @@ -122,7 +122,3 @@ class CommitStateUnknownException(RESTError): class WaitingForLockException(Exception): """Need to wait for a lock, try again.""" - - -class ValidationException(Exception): - """Raised when validation fails.""" diff --git a/pyiceberg/expressions/__init__.py b/pyiceberg/expressions/__init__.py index 2adf898fea..830637aa99 
100644 --- a/pyiceberg/expressions/__init__.py +++ b/pyiceberg/expressions/__init__.py @@ -18,13 +18,11 @@ from __future__ import annotations from abc import ABC, abstractmethod -from functools import cached_property +from functools import cached_property, reduce from typing import ( Any, - Callable, Generic, Iterable, - Sequence, Set, Tuple, Type, @@ -66,59 +64,6 @@ class BooleanExpression(ABC): def __invert__(self) -> BooleanExpression: """Transform the Expression into its negated version.""" - def __and__(self, other: BooleanExpression) -> BooleanExpression: - """Perform and operation on another expression.""" - if not isinstance(other, BooleanExpression): - raise ValueError(f"Expected BooleanExpression, got: {other}") - - return And(self, other) - - def __or__(self, other: BooleanExpression) -> BooleanExpression: - """Perform or operation on another expression.""" - if not isinstance(other, BooleanExpression): - raise ValueError(f"Expected BooleanExpression, got: {other}") - - return Or(self, other) - - -def _build_balanced_tree( - operator_: Callable[[BooleanExpression, BooleanExpression], BooleanExpression], items: Sequence[BooleanExpression] -) -> BooleanExpression: - """ - Recursively constructs a balanced binary tree of BooleanExpressions using the provided binary operator. - - This function is a safer and more scalable alternative to: - reduce(operator_, items) - - Using `reduce` creates a deeply nested, unbalanced tree (e.g., operator_(a, operator_(b, operator_(c, ...)))), - which grows linearly with the number of items. This can lead to RecursionError exceptions in Python - when the number of expressions is large (e.g., >1000). - - In contrast, this function builds a balanced binary tree with logarithmic depth (O(log n)), - helping avoid recursion issues and ensuring that expression trees remain stable, predictable, - and safe to traverse — especially in tools like PyIceberg that operate on large logical trees. - - Parameters: - operator_ (Callable): A binary operator function (e.g., pyiceberg.expressions.Or, And) that takes two - BooleanExpressions and returns a combined BooleanExpression. - items (Sequence[BooleanExpression]): A sequence of BooleanExpression objects to combine. - - Returns: - BooleanExpression: The balanced combination of all input BooleanExpressions. - - Raises: - ValueError: If the input sequence is empty. 
- """ - if not items: - raise ValueError("No expressions to combine") - if len(items) == 1: - return items[0] - mid = len(items) // 2 - - left = _build_balanced_tree(operator_, items[:mid]) - right = _build_balanced_tree(operator_, items[mid:]) - return operator_(left, right) - class Term(Generic[L], ABC): """A simple expression that evaluates to a value.""" @@ -255,7 +200,7 @@ class And(BooleanExpression): def __new__(cls, left: BooleanExpression, right: BooleanExpression, *rest: BooleanExpression) -> BooleanExpression: # type: ignore if rest: - return _build_balanced_tree(And, (left, right, *rest)) + return reduce(And, (left, right, *rest)) if left is AlwaysFalse() or right is AlwaysFalse(): return AlwaysFalse() elif left is AlwaysTrue(): @@ -298,7 +243,7 @@ class Or(BooleanExpression): def __new__(cls, left: BooleanExpression, right: BooleanExpression, *rest: BooleanExpression) -> BooleanExpression: # type: ignore if rest: - return _build_balanced_tree(Or, (left, right, *rest)) + return reduce(Or, (left, right, *rest)) if left is AlwaysTrue() or right is AlwaysTrue(): return AlwaysTrue() elif left is AlwaysFalse(): diff --git a/pyiceberg/expressions/literals.py b/pyiceberg/expressions/literals.py index 921e24e29f..b29d0d9e48 100644 --- a/pyiceberg/expressions/literals.py +++ b/pyiceberg/expressions/literals.py @@ -23,7 +23,7 @@ import struct from abc import ABC, abstractmethod -from datetime import date, datetime, time +from datetime import date, datetime from decimal import ROUND_HALF_UP, Decimal from functools import singledispatchmethod from math import isnan @@ -54,7 +54,6 @@ datetime_to_micros, micros_to_days, time_str_to_micros, - time_to_micros, timestamp_to_micros, timestamptz_to_micros, ) @@ -153,8 +152,6 @@ def literal(value: L) -> Literal[L]: return TimestampLiteral(datetime_to_micros(value)) # type: ignore elif isinstance(value, date): return DateLiteral(date_to_days(value)) # type: ignore - elif isinstance(value, time): - return TimeLiteral(time_to_micros(value)) # type: ignore else: raise TypeError(f"Invalid literal value: {repr(value)}") @@ -262,7 +259,7 @@ def __init__(self, value: bool) -> None: super().__init__(value, bool) @singledispatchmethod - def to(self, type_var: IcebergType) -> Literal[bool]: + def to(self, type_var: IcebergType) -> Literal[bool]: # type: ignore raise TypeError(f"Cannot convert BooleanLiteral into {type_var}") @to.register(BooleanType) @@ -606,26 +603,6 @@ def _(self, type_var: BooleanType) -> Literal[bool]: else: raise ValueError(f"Could not convert {self.value} into a {type_var}") - @to.register(FloatType) - def _(self, type_var: FloatType) -> Literal[float]: - try: - number = float(self.value) - if FloatType.max < number: - return FloatAboveMax() - elif FloatType.min > number: - return FloatBelowMin() - return FloatLiteral(number) - except ValueError as e: - raise ValueError(f"Could not convert {self.value} into a {type_var}") from e - - @to.register(DoubleType) - def _(self, type_var: DoubleType) -> Literal[float]: - try: - number = float(self.value) - return DoubleLiteral(number) - except ValueError as e: - raise ValueError(f"Could not convert {self.value} into a {type_var}") from e - def __repr__(self) -> str: """Return the string representation of the StringLiteral class.""" return f"literal({repr(self.value)})" diff --git a/pyiceberg/expressions/parser.py b/pyiceberg/expressions/parser.py index b9b6f9aba7..056defefb4 100644 --- a/pyiceberg/expressions/parser.py +++ b/pyiceberg/expressions/parser.py @@ -22,10 +22,8 @@ DelimitedList, 
Group, MatchFirst, - ParseException, ParserElement, ParseResults, - QuotedString, Suppress, Word, alphanums, @@ -68,6 +66,7 @@ ) from pyiceberg.typedef import L from pyiceberg.types import strtobool +from pyiceberg.utils.deprecated import deprecation_message ParserElement.enablePackrat() @@ -81,16 +80,7 @@ LIKE = CaselessKeyword("like") unquoted_identifier = Word(alphas + "_", alphanums + "_$") -quoted_identifier = QuotedString('"', escChar="\\", unquoteResults=True) - - -@quoted_identifier.set_parse_action -def validate_quoted_identifier(result: ParseResults) -> str: - if "." in result[0]: - raise ParseException("Expected '\"', found '.'") - return result[0] - - +quoted_identifier = Suppress('"') + unquoted_identifier + Suppress('"') identifier = MatchFirst([unquoted_identifier, quoted_identifier]).set_results_name("identifier") column = DelimitedList(identifier, delim=".", combine=False).set_results_name("column") @@ -99,7 +89,15 @@ def validate_quoted_identifier(result: ParseResults) -> str: @column.set_parse_action def _(result: ParseResults) -> Reference: - return Reference(".".join(result.column)) + if len(result.column) > 1: + deprecation_message( + deprecated_in="0.8.0", + removed_in="0.9.0", + help_message="Parsing expressions with table name is deprecated. Only provide field names in the row_filter.", + ) + # TODO: Once this is removed, we will no longer take just the last index of parsed column result + # And introduce support for parsing filter expressions with nested fields. + return Reference(result.column[-1]) boolean = one_of(["true", "false"], caseless=True).set_results_name("boolean") diff --git a/pyiceberg/expressions/visitors.py b/pyiceberg/expressions/visitors.py index a6268c0d48..abac19bc19 100644 --- a/pyiceberg/expressions/visitors.py +++ b/pyiceberg/expressions/visitors.py @@ -861,7 +861,6 @@ class _ColumnNameTranslator(BooleanExpressionVisitor[BooleanExpression]): Args: file_schema (Schema): The schema of the file. case_sensitive (bool): Whether to consider case when binding a reference to a field in a schema, defaults to True. - projected_field_values (Dict[int, Any]): Values for projected fields not present in the data file. Raises: TypeError: In the case of an UnboundPredicate. @@ -870,12 +869,10 @@ class _ColumnNameTranslator(BooleanExpressionVisitor[BooleanExpression]): file_schema: Schema case_sensitive: bool - projected_field_values: Dict[int, Any] - def __init__(self, file_schema: Schema, case_sensitive: bool, projected_field_values: Dict[int, Any] = EMPTY_DICT) -> None: + def __init__(self, file_schema: Schema, case_sensitive: bool) -> None: self.file_schema = file_schema self.case_sensitive = case_sensitive - self.projected_field_values = projected_field_values def visit_true(self) -> BooleanExpression: return AlwaysTrue() @@ -896,34 +893,15 @@ def visit_unbound_predicate(self, predicate: UnboundPredicate[L]) -> BooleanExpr raise TypeError(f"Expected Bound Predicate, got: {predicate.term}") def visit_bound_predicate(self, predicate: BoundPredicate[L]) -> BooleanExpression: - field = predicate.term.ref().field - field_id = field.field_id - file_column_name = self.file_schema.find_column_name(field_id) + file_column_name = self.file_schema.find_column_name(predicate.term.ref().field.field_id) if file_column_name is None: - # In the case of schema evolution or column projection, the field might not be present in the file schema. 
- # we can use the projected value or the field's default value as a constant and evaluate it against the predicate - pred: BooleanExpression - if isinstance(predicate, BoundUnaryPredicate): - pred = predicate.as_unbound(field.name) - elif isinstance(predicate, BoundLiteralPredicate): - pred = predicate.as_unbound(field.name, predicate.literal) - elif isinstance(predicate, BoundSetPredicate): - pred = predicate.as_unbound(field.name, predicate.literals) + # In the case of schema evolution, the column might not be present + # in the file schema when reading older data + if isinstance(predicate, BoundIsNull): + return AlwaysTrue() else: - raise ValueError(f"Unsupported predicate: {predicate}") - - # In the order described by the "Column Projection" section of the Iceberg spec: - # https://iceberg.apache.org/spec/#column-projection - # Evaluate column projection first if it exists, otherwise default to the initial-default-value - field_value = ( - self.projected_field_values[field_id] if field.field_id in self.projected_field_values else field.initial_default - ) - return ( - AlwaysTrue() - if expression_evaluator(Schema(field), pred, case_sensitive=self.case_sensitive)(Record(field_value)) - else AlwaysFalse() - ) + return AlwaysFalse() if isinstance(predicate, BoundUnaryPredicate): return predicate.as_unbound(file_column_name) @@ -935,10 +913,8 @@ def visit_bound_predicate(self, predicate: BoundPredicate[L]) -> BooleanExpressi raise ValueError(f"Unsupported predicate: {predicate}") -def translate_column_names( - expr: BooleanExpression, file_schema: Schema, case_sensitive: bool = True, projected_field_values: Dict[int, Any] = EMPTY_DICT -) -> BooleanExpression: - return visit(expr, _ColumnNameTranslator(file_schema, case_sensitive, projected_field_values)) +def translate_column_names(expr: BooleanExpression, file_schema: Schema, case_sensitive: bool) -> BooleanExpression: + return visit(expr, _ColumnNameTranslator(file_schema, case_sensitive)) class _ExpressionFieldIDs(BooleanExpressionVisitor[Set[int]]): diff --git a/pyiceberg/io/__init__.py b/pyiceberg/io/__init__.py index f89de18f12..ac25c2d767 100644 --- a/pyiceberg/io/__init__.py +++ b/pyiceberg/io/__init__.py @@ -27,6 +27,7 @@ import importlib import logging +import os import warnings from abc import ABC, abstractmethod from io import SEEK_SET @@ -36,6 +37,7 @@ List, Optional, Protocol, + Tuple, Type, Union, runtime_checkable, @@ -57,35 +59,26 @@ S3_SECRET_ACCESS_KEY = "s3.secret-access-key" S3_SESSION_TOKEN = "s3.session-token" S3_REGION = "s3.region" -S3_RESOLVE_REGION = "s3.resolve-region" S3_PROXY_URI = "s3.proxy-uri" S3_CONNECT_TIMEOUT = "s3.connect-timeout" S3_REQUEST_TIMEOUT = "s3.request-timeout" -S3_SIGNER = "s3.signer" S3_SIGNER_URI = "s3.signer.uri" S3_SIGNER_ENDPOINT = "s3.signer.endpoint" S3_SIGNER_ENDPOINT_DEFAULT = "v1/aws/s3/sign" S3_ROLE_ARN = "s3.role-arn" S3_ROLE_SESSION_NAME = "s3.role-session-name" S3_FORCE_VIRTUAL_ADDRESSING = "s3.force-virtual-addressing" -S3_RETRY_STRATEGY_IMPL = "s3.retry-strategy-impl" HDFS_HOST = "hdfs.host" HDFS_PORT = "hdfs.port" HDFS_USER = "hdfs.user" HDFS_KERB_TICKET = "hdfs.kerberos_ticket" ADLS_CONNECTION_STRING = "adls.connection-string" -ADLS_CREDENTIAL = "adls.credential" ADLS_ACCOUNT_NAME = "adls.account-name" ADLS_ACCOUNT_KEY = "adls.account-key" ADLS_SAS_TOKEN = "adls.sas-token" ADLS_TENANT_ID = "adls.tenant-id" ADLS_CLIENT_ID = "adls.client-id" -ADLS_CLIENT_SECRET = "adls.client-secret" -ADLS_ACCOUNT_HOST = "adls.account-host" -ADLS_BLOB_STORAGE_AUTHORITY = 
"adls.blob-storage-authority" -ADLS_DFS_STORAGE_AUTHORITY = "adls.dfs-storage-authority" -ADLS_BLOB_STORAGE_SCHEME = "adls.blob-storage-scheme" -ADLS_DFS_STORAGE_SCHEME = "adls.dfs-storage-scheme" +ADLS_ClIENT_SECRET = "adls.client-secret" GCS_TOKEN = "gcs.oauth2.token" GCS_TOKEN_EXPIRES_AT_MS = "gcs.oauth2.token-expires-at" GCS_PROJECT_ID = "gcs.project-id" @@ -97,8 +90,6 @@ GCS_SERVICE_HOST = "gcs.service.host" GCS_DEFAULT_LOCATION = "gcs.default-bucket-location" GCS_VERSION_AWARE = "gcs.version-aware" -HF_ENDPOINT = "hf.endpoint" -HF_TOKEN = "hf.token" PYARROW_USE_LARGE_TYPES_ON_READ = "pyarrow.use-large-types-on-read" @@ -313,7 +304,6 @@ def delete(self, location: Union[str, InputFile, OutputFile]) -> None: "viewfs": [ARROW_FILE_IO], "abfs": [FSSPEC_FILE_IO], "abfss": [FSSPEC_FILE_IO], - "hf": [FSSPEC_FILE_IO], } @@ -375,3 +365,14 @@ def load_file_io(properties: Properties = EMPTY_DICT, location: Optional[str] = raise ModuleNotFoundError( 'Could not load a FileIO, please consider installing one: pip3 install "pyiceberg[pyarrow]", for more options refer to the docs.' ) from e + + +def _parse_location(location: str) -> Tuple[str, str, str]: + """Return the path without the scheme.""" + uri = urlparse(location) + if not uri.scheme: + return "file", uri.netloc, os.path.abspath(location) + elif uri.scheme in ("hdfs", "viewfs"): + return uri.scheme, uri.netloc, uri.path + else: + return uri.scheme, uri.netloc, f"{uri.netloc}{uri.path}" diff --git a/pyiceberg/io/fsspec.py b/pyiceberg/io/fsspec.py index d075765ed1..d8fe3dfa40 100644 --- a/pyiceberg/io/fsspec.py +++ b/pyiceberg/io/fsspec.py @@ -36,16 +36,13 @@ from fsspec.implementations.local import LocalFileSystem from requests import HTTPError -from pyiceberg.catalog import TOKEN, URI +from pyiceberg.catalog import TOKEN from pyiceberg.exceptions import SignError from pyiceberg.io import ( - ADLS_ACCOUNT_HOST, ADLS_ACCOUNT_KEY, ADLS_ACCOUNT_NAME, ADLS_CLIENT_ID, - ADLS_CLIENT_SECRET, ADLS_CONNECTION_STRING, - ADLS_CREDENTIAL, ADLS_SAS_TOKEN, ADLS_TENANT_ID, AWS_ACCESS_KEY_ID, @@ -62,8 +59,6 @@ GCS_SESSION_KWARGS, GCS_TOKEN, GCS_VERSION_AWARE, - HF_ENDPOINT, - HF_TOKEN, S3_ACCESS_KEY_ID, S3_CONNECT_TIMEOUT, S3_ENDPOINT, @@ -72,10 +67,10 @@ S3_REQUEST_TIMEOUT, S3_SECRET_ACCESS_KEY, S3_SESSION_TOKEN, - S3_SIGNER, S3_SIGNER_ENDPOINT, S3_SIGNER_ENDPOINT_DEFAULT, S3_SIGNER_URI, + ADLS_ClIENT_SECRET, FileIO, InputFile, InputStream, @@ -92,7 +87,7 @@ def s3v4_rest_signer(properties: Properties, request: "AWSRequest", **_: Any) -> "AWSRequest": - signer_url = properties.get(S3_SIGNER_URI, properties[URI]).rstrip("/") # type: ignore + signer_url = properties.get(S3_SIGNER_URI, properties["uri"]).rstrip("/") signer_endpoint = properties.get(S3_SIGNER_ENDPOINT, S3_SIGNER_ENDPOINT_DEFAULT) signer_headers = {} @@ -142,7 +137,7 @@ def _s3(properties: Properties) -> AbstractFileSystem: config_kwargs = {} register_events: Dict[str, Callable[[Properties], None]] = {} - if signer := properties.get(S3_SIGNER): + if signer := properties.get("s3.signer"): logger.info("Loading signer %s", signer) if signer_func := SIGNERS.get(signer): signer_func_with_properties = partial(signer_func, properties) @@ -167,7 +162,6 @@ def _s3(properties: Properties) -> AbstractFileSystem: fs = S3FileSystem(client_kwargs=client_kwargs, config_kwargs=config_kwargs) for event_name, event_function in register_events.items(): - fs.s3.meta.events.unregister(event_name, unique_id=1925) fs.s3.meta.events.register_last(event_name, event_function, unique_id=1925) return fs @@ -195,7 +189,9 @@ 
def _adls(properties: Properties) -> AbstractFileSystem: from adlfs import AzureBlobFileSystem for key, sas_token in { - key.replace(f"{ADLS_SAS_TOKEN}.", ""): value for key, value in properties.items() if key.startswith(ADLS_SAS_TOKEN) + key.replace(f"{ADLS_SAS_TOKEN}.", ""): value + for key, value in properties.items() + if key.startswith(ADLS_SAS_TOKEN) and key.endswith(".windows.net") }.items(): if ADLS_ACCOUNT_NAME not in properties: properties[ADLS_ACCOUNT_NAME] = key.split(".")[0] @@ -204,23 +200,12 @@ def _adls(properties: Properties) -> AbstractFileSystem: return AzureBlobFileSystem( connection_string=properties.get(ADLS_CONNECTION_STRING), - credential=properties.get(ADLS_CREDENTIAL), account_name=properties.get(ADLS_ACCOUNT_NAME), account_key=properties.get(ADLS_ACCOUNT_KEY), sas_token=properties.get(ADLS_SAS_TOKEN), tenant_id=properties.get(ADLS_TENANT_ID), client_id=properties.get(ADLS_CLIENT_ID), - client_secret=properties.get(ADLS_CLIENT_SECRET), - account_host=properties.get(ADLS_ACCOUNT_HOST), - ) - - -def _hf(properties: Properties) -> AbstractFileSystem: - from huggingface_hub import HfFileSystem - - return HfFileSystem( - endpoint=properties.get(HF_ENDPOINT), - token=properties.get(HF_TOKEN), + client_secret=properties.get(ADLS_ClIENT_SECRET), ) @@ -234,7 +219,6 @@ def _hf(properties: Properties) -> AbstractFileSystem: "abfss": _adls, "gs": _gs, "gcs": _gs, - "hf": _hf, } diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index c756487c32..c0d078abc7 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -25,9 +25,9 @@ from __future__ import annotations +import concurrent.futures import fnmatch import functools -import importlib import itertools import logging import operator @@ -36,6 +36,7 @@ import uuid import warnings from abc import ABC, abstractmethod +from concurrent.futures import Future from copy import copy from dataclasses import dataclass from enum import Enum @@ -64,12 +65,13 @@ import pyarrow.lib import pyarrow.parquet as pq from pyarrow import ChunkedArray -from pyarrow._s3fs import S3RetryStrategy from pyarrow.fs import ( FileInfo, FileSystem, FileType, + FSSpecHandler, ) +from sortedcontainers import SortedList from pyiceberg.conversions import to_bytes from pyiceberg.exceptions import ResolveError @@ -83,16 +85,6 @@ ) from pyiceberg.expressions.visitors import visit as boolean_expression_visit from pyiceberg.io import ( - ADLS_ACCOUNT_KEY, - ADLS_ACCOUNT_NAME, - ADLS_BLOB_STORAGE_AUTHORITY, - ADLS_BLOB_STORAGE_SCHEME, - ADLS_CLIENT_ID, - ADLS_CLIENT_SECRET, - ADLS_DFS_STORAGE_AUTHORITY, - ADLS_DFS_STORAGE_SCHEME, - ADLS_SAS_TOKEN, - ADLS_TENANT_ID, AWS_ACCESS_KEY_ID, AWS_REGION, AWS_ROLE_ARN, @@ -115,8 +107,6 @@ S3_PROXY_URI, S3_REGION, S3_REQUEST_TIMEOUT, - S3_RESOLVE_REGION, - S3_RETRY_STRATEGY_IMPL, S3_ROLE_ARN, S3_ROLE_SESSION_NAME, S3_SECRET_ACCESS_KEY, @@ -126,6 +116,7 @@ InputStream, OutputFile, OutputStream, + _parse_location, ) from pyiceberg.manifest import ( DataFile, @@ -134,6 +125,7 @@ ) from pyiceberg.partitioning import PartitionField, PartitionFieldValue, PartitionKey, PartitionSpec, partition_record_value from pyiceberg.schema import ( + Accessor, PartnerAccessor, PreOrderSchemaVisitor, Schema, @@ -148,13 +140,11 @@ visit, visit_with_partner, ) -from pyiceberg.table import TableProperties from pyiceberg.table.locations import load_location_provider from pyiceberg.table.metadata import TableMetadata from pyiceberg.table.name_mapping import NameMapping, apply_name_mapping -from pyiceberg.table.puffin import 
PuffinFile from pyiceberg.transforms import IdentityTransform, TruncateTransform -from pyiceberg.typedef import EMPTY_DICT, Properties, Record, TableVersion +from pyiceberg.typedef import EMPTY_DICT, Properties, Record from pyiceberg.types import ( BinaryType, BooleanType, @@ -172,19 +162,14 @@ PrimitiveType, StringType, StructType, - TimestampNanoType, TimestampType, - TimestamptzNanoType, TimestamptzType, TimeType, - UnknownType, UUIDType, ) from pyiceberg.utils.concurrent import ExecutorFactory from pyiceberg.utils.config import Config from pyiceberg.utils.datetime import millis_to_datetime -from pyiceberg.utils.decimal import unscaled_to_decimal -from pyiceberg.utils.deprecated import deprecation_message from pyiceberg.utils.properties import get_first_property_value, property_as_bool, property_as_int from pyiceberg.utils.singleton import Singleton from pyiceberg.utils.truncate import truncate_upper_bound_binary_string, truncate_upper_bound_text_string @@ -209,31 +194,6 @@ T = TypeVar("T") -@lru_cache -def _cached_resolve_s3_region(bucket: str) -> Optional[str]: - from pyarrow.fs import resolve_s3_region - - try: - return resolve_s3_region(bucket=bucket) - except (OSError, TypeError): - logger.warning(f"Unable to resolve region for bucket {bucket}") - return None - - -def _import_retry_strategy(impl: str) -> Optional[S3RetryStrategy]: - try: - path_parts = impl.split(".") - if len(path_parts) < 2: - raise ValueError(f"retry-strategy-impl should be full path (module.CustomS3RetryStrategy), got: {impl}") - module_name, class_name = ".".join(path_parts[:-1]), path_parts[-1] - module = importlib.import_module(module_name) - class_ = getattr(module, class_name) - return class_() - except (ModuleNotFoundError, AttributeError): - warnings.warn(f"Could not initialize S3 retry strategy: {impl}") - return None - - class UnsupportedPyArrowTypeException(Exception): """Cannot convert PyArrow type to corresponding Iceberg type.""" @@ -331,7 +291,9 @@ def open(self, seekable: bool = True) -> InputStream: input_file = self._filesystem.open_input_file(self._path) else: input_file = self._filesystem.open_input_stream(self._path, buffer_size=self._buffer_size) - except (FileNotFoundError, PermissionError): + except FileNotFoundError: + raise + except PermissionError: raise except OSError as e: if e.errno == 2 or "Path does not exist" in str(e): @@ -414,9 +376,6 @@ def _initialize_fs(self, scheme: str, netloc: Optional[str] = None) -> FileSyste elif scheme in {"gs", "gcs"}: return self._initialize_gcs_fs() - elif scheme in {"abfs", "abfss", "wasb", "wasbs"}: - return self._initialize_azure_fs() - elif scheme in {"file"}: return self._initialize_local_fs() @@ -432,7 +391,6 @@ def _initialize_oss_fs(self) -> FileSystem: "secret_key": get_first_property_value(self.properties, S3_SECRET_ACCESS_KEY, AWS_SECRET_ACCESS_KEY), "session_token": get_first_property_value(self.properties, S3_SESSION_TOKEN, AWS_SESSION_TOKEN), "region": get_first_property_value(self.properties, S3_REGION, AWS_REGION), - "force_virtual_addressing": property_as_bool(self.properties, S3_FORCE_VIRTUAL_ADDRESSING, True), } if proxy_uri := self.properties.get(S3_PROXY_URI): @@ -450,25 +408,29 @@ def _initialize_oss_fs(self) -> FileSystem: if session_name := get_first_property_value(self.properties, S3_ROLE_SESSION_NAME, AWS_ROLE_SESSION_NAME): client_kwargs["session_name"] = session_name + if force_virtual_addressing := self.properties.get(S3_FORCE_VIRTUAL_ADDRESSING): + client_kwargs["force_virtual_addressing"] = 
property_as_bool(self.properties, force_virtual_addressing, False) + return S3FileSystem(**client_kwargs) def _initialize_s3_fs(self, netloc: Optional[str]) -> FileSystem: - from pyarrow.fs import S3FileSystem + from pyarrow.fs import S3FileSystem, resolve_s3_region + # Resolve region from netloc(bucket), fallback to user-provided region provided_region = get_first_property_value(self.properties, S3_REGION, AWS_REGION) - # Do this when we don't provide the region at all, or when we explicitly enable it - if provided_region is None or property_as_bool(self.properties, S3_RESOLVE_REGION, False) is True: - # Resolve region from netloc(bucket), fallback to user-provided region - # Only supported by buckets hosted by S3 - bucket_region = _cached_resolve_s3_region(bucket=netloc) or provided_region - if provided_region is not None and bucket_region != provided_region: - logger.warning( - f"PyArrow FileIO overriding S3 bucket region for bucket {netloc}: " - f"provided region {provided_region}, actual region {bucket_region}" - ) - else: - bucket_region = provided_region + try: + bucket_region = resolve_s3_region(bucket=netloc) + except (OSError, TypeError): + bucket_region = None + logger.warning(f"Unable to resolve region for bucket {netloc}, using default region {provided_region}") + + bucket_region = bucket_region or provided_region + if bucket_region != provided_region: + logger.warning( + f"PyArrow FileIO overriding S3 bucket region for bucket {netloc}: " + f"provided region {provided_region}, actual region {bucket_region}" + ) client_kwargs: Dict[str, Any] = { "endpoint_override": self.properties.get(S3_ENDPOINT), @@ -493,72 +455,11 @@ def _initialize_s3_fs(self, netloc: Optional[str]) -> FileSystem: if session_name := get_first_property_value(self.properties, S3_ROLE_SESSION_NAME, AWS_ROLE_SESSION_NAME): client_kwargs["session_name"] = session_name - if self.properties.get(S3_FORCE_VIRTUAL_ADDRESSING) is not None: - client_kwargs["force_virtual_addressing"] = property_as_bool(self.properties, S3_FORCE_VIRTUAL_ADDRESSING, False) - - if (retry_strategy_impl := self.properties.get(S3_RETRY_STRATEGY_IMPL)) and ( - retry_instance := _import_retry_strategy(retry_strategy_impl) - ): - client_kwargs["retry_strategy"] = retry_instance + if force_virtual_addressing := self.properties.get(S3_FORCE_VIRTUAL_ADDRESSING): + client_kwargs["force_virtual_addressing"] = property_as_bool(self.properties, force_virtual_addressing, False) return S3FileSystem(**client_kwargs) - def _initialize_azure_fs(self) -> FileSystem: - # https://arrow.apache.org/docs/python/generated/pyarrow.fs.AzureFileSystem.html - from packaging import version - - MIN_PYARROW_VERSION_SUPPORTING_AZURE_FS = "20.0.0" - if version.parse(pyarrow.__version__) < version.parse(MIN_PYARROW_VERSION_SUPPORTING_AZURE_FS): - raise ImportError( - f"pyarrow version >= {MIN_PYARROW_VERSION_SUPPORTING_AZURE_FS} required for AzureFileSystem support, " - f"but found version {pyarrow.__version__}." 
- ) - - from pyarrow.fs import AzureFileSystem - - client_kwargs: Dict[str, str] = {} - - if account_name := self.properties.get(ADLS_ACCOUNT_NAME): - client_kwargs["account_name"] = account_name - - if account_key := self.properties.get(ADLS_ACCOUNT_KEY): - client_kwargs["account_key"] = account_key - - if blob_storage_authority := self.properties.get(ADLS_BLOB_STORAGE_AUTHORITY): - client_kwargs["blob_storage_authority"] = blob_storage_authority - - if dfs_storage_authority := self.properties.get(ADLS_DFS_STORAGE_AUTHORITY): - client_kwargs["dfs_storage_authority"] = dfs_storage_authority - - if blob_storage_scheme := self.properties.get(ADLS_BLOB_STORAGE_SCHEME): - client_kwargs["blob_storage_scheme"] = blob_storage_scheme - - if dfs_storage_scheme := self.properties.get(ADLS_DFS_STORAGE_SCHEME): - client_kwargs["dfs_storage_scheme"] = dfs_storage_scheme - - if sas_token := self.properties.get(ADLS_SAS_TOKEN): - client_kwargs["sas_token"] = sas_token - - if client_id := self.properties.get(ADLS_CLIENT_ID): - client_kwargs["client_id"] = client_id - if client_secret := self.properties.get(ADLS_CLIENT_SECRET): - client_kwargs["client_secret"] = client_secret - if tenant_id := self.properties.get(ADLS_TENANT_ID): - client_kwargs["tenant_id"] = tenant_id - - # Validate that all three are provided together for ClientSecretCredential - credential_keys = ["client_id", "client_secret", "tenant_id"] - provided_keys = [key for key in credential_keys if key in client_kwargs] - if provided_keys and len(provided_keys) != len(credential_keys): - missing_keys = [key for key in credential_keys if key not in client_kwargs] - raise ValueError( - f"client_id, client_secret, and tenant_id must all be provided together " - f"to use ClientSecretCredential for Azure authentication. 
" - f"Provided: {provided_keys}, Missing: {missing_keys}" - ) - - return AzureFileSystem(**client_kwargs) - def _initialize_hdfs_fs(self, scheme: str, netloc: Optional[str]) -> FileSystem: from pyarrow.fs import HadoopFileSystem @@ -720,12 +621,6 @@ def visit_fixed(self, fixed_type: FixedType) -> pa.DataType: return pa.binary(len(fixed_type)) def visit_decimal(self, decimal_type: DecimalType) -> pa.DataType: - # It looks like decimal{32,64} is not fully implemented: - # https://github.com/apache/arrow/issues/25483 - # https://github.com/apache/arrow/issues/43956 - # However, if we keep it as 128 in memory, and based on the - # precision/scale Arrow will map it to INT{32,64} - # https://github.com/apache/arrow/blob/598938711a8376cbfdceaf5c77ab0fd5057e6c02/cpp/src/parquet/arrow/schema.cc#L380-L392 return pa.decimal128(decimal_type.precision, decimal_type.scale) def visit_boolean(self, _: BooleanType) -> pa.DataType: @@ -755,23 +650,14 @@ def visit_time(self, _: TimeType) -> pa.DataType: def visit_timestamp(self, _: TimestampType) -> pa.DataType: return pa.timestamp(unit="us") - def visit_timestamp_ns(self, _: TimestampNanoType) -> pa.DataType: - return pa.timestamp(unit="ns") - def visit_timestamptz(self, _: TimestamptzType) -> pa.DataType: return pa.timestamp(unit="us", tz="UTC") - def visit_timestamptz_ns(self, _: TimestamptzNanoType) -> pa.DataType: - return pa.timestamp(unit="ns", tz="UTC") - def visit_string(self, _: StringType) -> pa.DataType: return pa.large_string() def visit_uuid(self, _: UUIDType) -> pa.DataType: - return pa.uuid() - - def visit_unknown(self, _: UnknownType) -> pa.DataType: - return pa.null() + return pa.binary(16) def visit_binary(self, _: BinaryType) -> pa.DataType: return pa.large_binary() @@ -1000,25 +886,21 @@ def _get_file_format(file_format: FileFormat, **kwargs: Dict[str, Any]) -> ds.Fi raise ValueError(f"Unsupported file format: {file_format}") -def _read_deletes(io: FileIO, data_file: DataFile) -> Dict[str, pa.ChunkedArray]: - if data_file.file_format == FileFormat.PARQUET: - with io.new_input(data_file.file_path).open() as fi: - delete_fragment = _get_file_format( - data_file.file_format, dictionary_columns=("file_path",), pre_buffer=True, buffer_size=ONE_MEGABYTE - ).make_fragment(fi) - table = ds.Scanner.from_fragment(fragment=delete_fragment).to_table() - table = table.unify_dictionaries() - return { - file.as_py(): table.filter(pc.field("file_path") == file).column("pos") - for file in table.column("file_path").chunks[0].dictionary - } - elif data_file.file_format == FileFormat.PUFFIN: - with io.new_input(data_file.file_path).open() as fi: - payload = fi.read() +def _construct_fragment(fs: FileSystem, data_file: DataFile, file_format_kwargs: Dict[str, Any] = EMPTY_DICT) -> ds.Fragment: + _, _, path = PyArrowFileIO.parse_location(data_file.file_path) + return _get_file_format(data_file.file_format, **file_format_kwargs).make_fragment(path, fs) - return PuffinFile(payload).to_vector() - else: - raise ValueError(f"Delete file format not supported: {data_file.file_format}") + +def _read_deletes(fs: FileSystem, data_file: DataFile) -> Dict[str, pa.ChunkedArray]: + delete_fragment = _construct_fragment( + fs, data_file, file_format_kwargs={"dictionary_columns": ("file_path",), "pre_buffer": True, "buffer_size": ONE_MEGABYTE} + ) + table = ds.Scanner.from_fragment(fragment=delete_fragment).to_table() + table = table.unify_dictionaries() + return { + file.as_py(): table.filter(pc.field("file_path") == file).column("pos") + for file in 
table.column("file_path").chunks[0].dictionary + } def _combine_positional_deletes(positional_deletes: List[pa.ChunkedArray], start_index: int, end_index: int) -> pa.Array: @@ -1040,20 +922,13 @@ def _combine_positional_deletes(positional_deletes: List[pa.ChunkedArray], start def pyarrow_to_schema( - schema: pa.Schema, - name_mapping: Optional[NameMapping] = None, - downcast_ns_timestamp_to_us: bool = False, - format_version: TableVersion = TableProperties.DEFAULT_FORMAT_VERSION, + schema: pa.Schema, name_mapping: Optional[NameMapping] = None, downcast_ns_timestamp_to_us: bool = False ) -> Schema: has_ids = visit_pyarrow(schema, _HasIds()) if has_ids: - return visit_pyarrow( - schema, _ConvertToIceberg(downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, format_version=format_version) - ) + return visit_pyarrow(schema, _ConvertToIceberg(downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us)) elif name_mapping is not None: - schema_without_ids = _pyarrow_to_schema_without_ids( - schema, downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, format_version=format_version - ) + schema_without_ids = _pyarrow_to_schema_without_ids(schema, downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us) return apply_name_mapping(schema_without_ids, name_mapping) else: raise ValueError( @@ -1061,15 +936,8 @@ def pyarrow_to_schema( ) -def _pyarrow_to_schema_without_ids( - schema: pa.Schema, - downcast_ns_timestamp_to_us: bool = False, - format_version: TableVersion = TableProperties.DEFAULT_FORMAT_VERSION, -) -> Schema: - return visit_pyarrow( - schema, - _ConvertToIcebergWithoutIDs(downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, format_version=format_version), - ) +def _pyarrow_to_schema_without_ids(schema: pa.Schema, downcast_ns_timestamp_to_us: bool = False) -> Schema: + return visit_pyarrow(schema, _ConvertToIcebergWithoutIDs(downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us)) def _pyarrow_schema_ensure_large_types(schema: pa.Schema) -> pa.Schema: @@ -1251,12 +1119,9 @@ class _ConvertToIceberg(PyArrowSchemaVisitor[Union[IcebergType, Schema]]): _field_names: List[str] - def __init__( - self, downcast_ns_timestamp_to_us: bool = False, format_version: TableVersion = TableProperties.DEFAULT_FORMAT_VERSION - ) -> None: # noqa: F821 + def __init__(self, downcast_ns_timestamp_to_us: bool = False) -> None: self._field_names = [] self._downcast_ns_timestamp_to_us = downcast_ns_timestamp_to_us - self._format_version = format_version def _field_id(self, field: pa.Field) -> int: if (field_id := _get_field_id(field)) is not None: @@ -1313,7 +1178,7 @@ def primitive(self, primitive: pa.DataType) -> PrimitiveType: elif isinstance(primitive, pa.Decimal128Type): primitive = cast(pa.Decimal128Type, primitive) return DecimalType(primitive.precision, primitive.scale) - elif pa.types.is_string(primitive) or pa.types.is_large_string(primitive) or pa.types.is_string_view(primitive): + elif pa.types.is_string(primitive) or pa.types.is_large_string(primitive): return StringType() elif pa.types.is_date32(primitive): return DateType() @@ -1327,11 +1192,6 @@ def primitive(self, primitive: pa.DataType) -> PrimitiveType: elif primitive.unit == "ns": if self._downcast_ns_timestamp_to_us: logger.warning("Iceberg does not yet support 'ns' timestamp precision. Downcasting to 'us'.") - elif self._format_version >= 3: - if primitive.tz in UTC_ALIASES: - return TimestamptzNanoType() - else: - return TimestampNanoType() else: raise TypeError( "Iceberg does not yet support 'ns' timestamp precision. 
Use 'downcast-ns-timestamp-to-us-on-write' configuration property to automatically downcast 'ns' to 'us' on write.", @@ -1344,15 +1204,11 @@ def primitive(self, primitive: pa.DataType) -> PrimitiveType: elif primitive.tz is None: return TimestampType() - elif pa.types.is_binary(primitive) or pa.types.is_large_binary(primitive) or pa.types.is_binary_view(primitive): + elif pa.types.is_binary(primitive) or pa.types.is_large_binary(primitive): return BinaryType() elif pa.types.is_fixed_size_binary(primitive): primitive = cast(pa.FixedSizeBinaryType, primitive) return FixedType(primitive.byte_width) - elif pa.types.is_null(primitive): - return UnknownType() - elif isinstance(primitive, pa.UuidType): - return UUIDType() raise TypeError(f"Unsupported type: {primitive}") @@ -1446,27 +1302,45 @@ def _field_id(self, field: pa.Field) -> int: def _get_column_projection_values( file: DataFile, projected_schema: Schema, partition_spec: Optional[PartitionSpec], file_project_field_ids: Set[int] -) -> Dict[int, Any]: +) -> Tuple[bool, Dict[str, Any]]: """Apply Column Projection rules to File Schema.""" project_schema_diff = projected_schema.field_ids.difference(file_project_field_ids) - if len(project_schema_diff) == 0 or partition_spec is None: - return EMPTY_DICT + should_project_columns = len(project_schema_diff) > 0 + projected_missing_fields: Dict[str, Any] = {} + + if not should_project_columns: + return False, {} - partition_schema = partition_spec.partition_type(projected_schema) - accessors = build_position_accessors(partition_schema) + partition_schema: StructType + accessors: Dict[int, Accessor] + + if partition_spec is not None: + partition_schema = partition_spec.partition_type(projected_schema) + accessors = build_position_accessors(partition_schema) + else: + return False, {} - projected_missing_fields = {} for field_id in project_schema_diff: for partition_field in partition_spec.fields_by_source_id(field_id): if isinstance(partition_field.transform, IdentityTransform): - if partition_value := accessors[partition_field.field_id].get(file.partition): - projected_missing_fields[field_id] = partition_value + accessor = accessors.get(partition_field.field_id) - return projected_missing_fields + if accessor is None: + continue + + # The partition field may not exist in the partition record of the data file. + # This can happen when new partition fields are introduced after the file was written. + try: + if partition_value := accessor.get(file.partition): + projected_missing_fields[partition_field.name] = partition_value + except IndexError: + continue + + return True, projected_missing_fields def _task_to_record_batches( - io: FileIO, + fs: FileSystem, task: FileScanTask, bound_row_filter: BooleanExpression, projected_schema: Schema, @@ -1474,10 +1348,12 @@ def _task_to_record_batches( positional_deletes: Optional[List[ChunkedArray]], case_sensitive: bool, name_mapping: Optional[NameMapping] = None, + use_large_types: bool = True, partition_spec: Optional[PartitionSpec] = None, ) -> Iterator[pa.RecordBatch]: + _, _, path = _parse_location(task.file.file_path) arrow_format = ds.ParquetFileFormat(pre_buffer=True, buffer_size=(ONE_MEGABYTE * 8)) - with io.new_input(task.file.file_path).open() as fin: + with fs.open_input_file(path) as fin: fragment = arrow_format.make_fragment(fin) physical_schema = fragment.physical_schema # In V1 and V2 table formats, we only support Timestamp 'us' in Iceberg Schema @@ -1486,24 +1362,29 @@ def _task_to_record_batches( # the table format version. 
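For orientation: the read path touched in this hunk opens each data file, wraps it in a PyArrow Parquet fragment, and pushes the bound row filter down to the Arrow scanner. A minimal, self-contained sketch of that pattern follows; the file name, filter, and column list are hypothetical stand-ins, not pyiceberg's actual inputs.

    # Sketch of the fragment/scanner pattern used by _task_to_record_batches.
    # The file, filter, and columns below are invented for illustration.
    import pyarrow.compute as pc
    import pyarrow.dataset as ds

    ONE_MEGABYTE = 1024 * 1024
    arrow_format = ds.ParquetFileFormat(pre_buffer=True, buffer_size=8 * ONE_MEGABYTE)

    with open("example.parquet", "rb") as fin:
        fragment = arrow_format.make_fragment(fin)
        scanner = ds.Scanner.from_fragment(
            fragment=fragment,
            schema=fragment.physical_schema,
            filter=pc.field("id") > 100,  # predicate pushed down to Arrow
            columns=["id", "name"],
        )
        for batch in scanner.to_batches():
            print(batch.num_rows)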
file_schema = pyarrow_to_schema(physical_schema, name_mapping, downcast_ns_timestamp_to_us=True) - # Apply column projection rules: https://iceberg.apache.org/spec/#column-projection - projected_missing_fields = _get_column_projection_values( - task.file, projected_schema, partition_spec, file_schema.field_ids - ) - pyarrow_filter = None if bound_row_filter is not AlwaysTrue(): - translated_row_filter = translate_column_names( - bound_row_filter, file_schema, case_sensitive=case_sensitive, projected_field_values=projected_missing_fields - ) + translated_row_filter = translate_column_names(bound_row_filter, file_schema, case_sensitive=case_sensitive) bound_file_filter = bind(file_schema, translated_row_filter, case_sensitive=case_sensitive) pyarrow_filter = expression_to_pyarrow(bound_file_filter) + # Apply column projection rules + # https://iceberg.apache.org/spec/#column-projection + should_project_columns, projected_missing_fields = _get_column_projection_values( + task.file, projected_schema, partition_spec, file_schema.field_ids + ) + file_project_schema = prune_columns(file_schema, projected_field_ids, select_full_types=False) fragment_scanner = ds.Scanner.from_fragment( fragment=fragment, - schema=physical_schema, + # With PyArrow 16.0.0 there is an issue with casting record-batches: + # https://github.com/apache/arrow/issues/41884 + # https://github.com/apache/arrow/issues/43183 + # Would be good to remove this later on + schema=_pyarrow_schema_ensure_large_types(physical_schema) + if use_large_types + else (_pyarrow_schema_ensure_small_types(physical_schema)), # This will push down the query to Arrow. # But in case there are positional deletes, we have to apply them first filter=pyarrow_filter if not positional_deletes else None, @@ -1528,23 +1409,28 @@ def _task_to_record_batches( # Apply the user filter if pyarrow_filter is not None: - # Temporary fix until PyArrow 21 is released ( https://github.com/apache/arrow/pull/46057 ) - table = pa.Table.from_batches([current_batch]) - table = table.filter(pyarrow_filter) + current_batch = current_batch.filter(pyarrow_filter) # skip empty batches - if table.num_rows == 0: + if current_batch.num_rows == 0: continue - current_batch = table.combine_chunks().to_batches()[0] - - yield _to_requested_schema( + result_batch = _to_requested_schema( projected_schema, file_project_schema, current_batch, downcast_ns_timestamp_to_us=True, - projected_missing_fields=projected_missing_fields, + use_large_types=use_large_types, ) + # Inject projected column values if available + if should_project_columns: + for name, value in projected_missing_fields.items(): + index = result_batch.schema.get_field_index(name) + if index != -1: + result_batch = result_batch.set_column(index, name, [value]) + + yield result_batch + def _read_all_delete_files(io: FileIO, tasks: Iterable[FileScanTask]) -> Dict[str, List[ChunkedArray]]: deletes_per_file: Dict[str, List[ChunkedArray]] = {} @@ -1553,7 +1439,7 @@ def _read_all_delete_files(io: FileIO, tasks: Iterable[FileScanTask]) -> Dict[st executor = ExecutorFactory.get_or_create() deletes_per_files: Iterator[Dict[str, ChunkedArray]] = executor.map( lambda args: _read_deletes(*args), - [(io, delete_file) for delete_file in unique_deletes], + [(_fs_from_file_path(io, delete_file.file_path), delete_file) for delete_file in unique_deletes], ) for delete in deletes_per_files: for file, arr in delete.items(): @@ -1565,6 +1451,25 @@ def _read_all_delete_files(io: FileIO, tasks: Iterable[FileScanTask]) -> Dict[st return 
deletes_per_file +def _fs_from_file_path(io: FileIO, file_path: str) -> FileSystem: + scheme, netloc, _ = _parse_location(file_path) + if isinstance(io, PyArrowFileIO): + return io.fs_by_scheme(scheme, netloc) + else: + try: + from pyiceberg.io.fsspec import FsspecFileIO + + if isinstance(io, FsspecFileIO): + from pyarrow.fs import PyFileSystem + + return PyFileSystem(FSSpecHandler(io.get_fs(scheme))) + else: + raise ValueError(f"Expected PyArrowFileIO or FsspecFileIO, got: {io}") + except ModuleNotFoundError as e: + # When FsSpec is not installed + raise ValueError(f"Expected PyArrowFileIO or FsspecFileIO, got: {io}") from e + + class ArrowScan: _table_metadata: TableMetadata _io: FileIO @@ -1599,6 +1504,14 @@ def __init__( self._case_sensitive = case_sensitive self._limit = limit + @property + def _use_large_types(self) -> bool: + """Whether to represent data as large arrow types. + + Defaults to True. + """ + return property_as_bool(self._io.properties, PYARROW_USE_LARGE_TYPES_ON_READ, True) + @property def _projected_field_ids(self) -> Set[int]: """Set of field IDs that should be projected from the data files.""" @@ -1625,28 +1538,48 @@ def to_table(self, tasks: Iterable[FileScanTask]) -> pa.Table: ResolveError: When a required field cannot be found in the file ValueError: When a field type in the file cannot be projected to the schema type """ - arrow_schema = schema_to_pyarrow(self._projected_schema, include_field_ids=False) + deletes_per_file = _read_all_delete_files(self._io, tasks) + executor = ExecutorFactory.get_or_create() - batches = self.to_record_batches(tasks) - try: - first_batch = next(batches) - except StopIteration: - # Empty - return arrow_schema.empty_table() - - # Note: cannot use pa.Table.from_batches(itertools.chain([first_batch], batches))) - # as different batches can use different schema's (due to large_ types) - result = pa.concat_tables( - (pa.Table.from_batches([batch]) for batch in itertools.chain([first_batch], batches)), promote_options="permissive" - ) + def _table_from_scan_task(task: FileScanTask) -> pa.Table: + batches = list(self._record_batches_from_scan_tasks_and_deletes([task], deletes_per_file)) + if len(batches) > 0: + return pa.Table.from_batches(batches) + else: + return None - if property_as_bool(self._io.properties, PYARROW_USE_LARGE_TYPES_ON_READ, False): - deprecation_message( - deprecated_in="0.10.0", - removed_in="0.11.0", - help_message=f"Property `{PYARROW_USE_LARGE_TYPES_ON_READ}` will be removed.", + futures = [ + executor.submit( + _table_from_scan_task, + task, ) - result = result.cast(arrow_schema) + for task in tasks + ] + total_row_count = 0 + # for consistent ordering, we need to maintain future order + futures_index = {f: i for i, f in enumerate(futures)} + completed_futures: SortedList[Future[pa.Table]] = SortedList(iterable=[], key=lambda f: futures_index[f]) + for future in concurrent.futures.as_completed(futures): + completed_futures.add(future) + if table_result := future.result(): + total_row_count += len(table_result) + # stop early if limit is satisfied + if self._limit is not None and total_row_count >= self._limit: + break + + # by now, we've either completed all tasks or satisfied the limit + if self._limit is not None: + _ = [f.cancel() for f in futures if not f.done()] + + tables = [f.result() for f in completed_futures if f.result()] + + if len(tables) < 1: + return pa.Table.from_batches([], schema=schema_to_pyarrow(self._projected_schema, include_field_ids=False)) + + result = pa.concat_tables(tables, 
promote_options="permissive") + + if self._limit is not None: + return result.slice(0, self._limit) return result @@ -1669,32 +1602,7 @@ def to_record_batches(self, tasks: Iterable[FileScanTask]) -> Iterator[pa.Record ValueError: When a field type in the file cannot be projected to the schema type """ deletes_per_file = _read_all_delete_files(self._io, tasks) - - total_row_count = 0 - executor = ExecutorFactory.get_or_create() - - def batches_for_task(task: FileScanTask) -> List[pa.RecordBatch]: - # Materialize the iterator here to ensure execution happens within the executor. - # Otherwise, the iterator would be lazily consumed later (in the main thread), - # defeating the purpose of using executor.map. - return list(self._record_batches_from_scan_tasks_and_deletes([task], deletes_per_file)) - - limit_reached = False - for batches in executor.map(batches_for_task, tasks): - for batch in batches: - current_batch_size = len(batch) - if self._limit is not None and total_row_count + current_batch_size >= self._limit: - yield batch.slice(0, self._limit - total_row_count) - - limit_reached = True - break - else: - yield batch - total_row_count += current_batch_size - - if limit_reached: - # This break will also cancel all running tasks in the executor - break + return self._record_batches_from_scan_tasks_and_deletes(tasks, deletes_per_file) def _record_batches_from_scan_tasks_and_deletes( self, tasks: Iterable[FileScanTask], deletes_per_file: Dict[str, List[ChunkedArray]] @@ -1704,7 +1612,7 @@ def _record_batches_from_scan_tasks_and_deletes( if self._limit is not None and total_row_count >= self._limit: break batches = _task_to_record_batches( - self._io, + _fs_from_file_path(self._io, task.file.file_path), task, self._bound_row_filter, self._projected_schema, @@ -1712,7 +1620,8 @@ def _record_batches_from_scan_tasks_and_deletes( deletes_per_file.get(task.file.file_path), self._case_sensitive, self._table_metadata.name_mapping(), - self._table_metadata.specs().get(task.file.spec_id), + self._use_large_types, + self._table_metadata.spec(), ) for batch in batches: if self._limit is not None: @@ -1730,15 +1639,13 @@ def _to_requested_schema( batch: pa.RecordBatch, downcast_ns_timestamp_to_us: bool = False, include_field_ids: bool = False, - projected_missing_fields: Dict[int, Any] = EMPTY_DICT, + use_large_types: bool = True, ) -> pa.RecordBatch: # We could reuse some of these visitors struct_array = visit_with_partner( requested_schema, batch, - ArrowProjectionVisitor( - file_schema, downcast_ns_timestamp_to_us, include_field_ids, projected_missing_fields=projected_missing_fields - ), + ArrowProjectionVisitor(file_schema, downcast_ns_timestamp_to_us, include_field_ids, use_large_types), ArrowAccessor(file_schema), ) return pa.RecordBatch.from_struct_array(struct_array) @@ -1748,29 +1655,19 @@ class ArrowProjectionVisitor(SchemaWithPartnerVisitor[pa.Array, Optional[pa.Arra _file_schema: Schema _include_field_ids: bool _downcast_ns_timestamp_to_us: bool - _use_large_types: Optional[bool] - _projected_missing_fields: Dict[int, Any] + _use_large_types: bool def __init__( self, file_schema: Schema, downcast_ns_timestamp_to_us: bool = False, include_field_ids: bool = False, - use_large_types: Optional[bool] = None, - projected_missing_fields: Dict[int, Any] = EMPTY_DICT, + use_large_types: bool = True, ) -> None: self._file_schema = file_schema self._include_field_ids = include_field_ids self._downcast_ns_timestamp_to_us = downcast_ns_timestamp_to_us self._use_large_types = use_large_types - 
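The `use_large_types` flag threaded back through `ArrowProjectionVisitor` here controls whether variable-length columns are materialized with Arrow's 64-bit-offset `large_*` layouts or their 32-bit counterparts. The cast it governs amounts to the following standalone illustration (not pyiceberg code):

    import pyarrow as pa

    small = pa.array(["a", "b"], type=pa.string())  # 32-bit offsets
    large = small.cast(pa.large_string())           # 64-bit offsets
    assert large.type == pa.large_string()
    assert large.cast(pa.string()).equals(small)    # round-trips when the data fits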
self._projected_missing_fields = projected_missing_fields - - if use_large_types is not None: - deprecation_message( - deprecated_in="0.10.0", - removed_in="0.11.0", - help_message="Argument `use_large_types` will be removed from ArrowProjectionVisitor", - ) def _cast_if_needed(self, field: NestedField, values: pa.Array) -> pa.Array: file_field = self._file_schema.find_field(field.field_id) @@ -1780,7 +1677,7 @@ def _cast_if_needed(self, field: NestedField, values: pa.Array) -> pa.Array: target_schema = schema_to_pyarrow( promote(file_field.field_type, field.field_type), include_field_ids=self._include_field_ids ) - if self._use_large_types is False: + if not self._use_large_types: target_schema = _pyarrow_schema_ensure_small_types(target_schema) return values.cast(target_schema) elif (target_type := schema_to_pyarrow(field.field_type, include_field_ids=self._include_field_ids)) != values.type: @@ -1840,15 +1737,9 @@ def struct( array = self._cast_if_needed(field, field_array) field_arrays.append(array) fields.append(self._construct_field(field, array.type)) - elif field.optional or field.initial_default is not None: - # When an optional field is added, or when a required field with a non-null initial default is added - arrow_type = schema_to_pyarrow(field.field_type, include_field_ids=self._include_field_ids) - if projected_value := self._projected_missing_fields.get(field.field_id): - field_arrays.append(pa.repeat(pa.scalar(projected_value, type=arrow_type), len(struct_array))) - elif field.initial_default is None: - field_arrays.append(pa.nulls(len(struct_array), type=arrow_type)) - else: - field_arrays.append(pa.repeat(pa.scalar(field.initial_default, type=arrow_type), len(struct_array))) + elif field.optional: + arrow_type = schema_to_pyarrow(field.field_type, include_field_ids=False) + field_arrays.append(pa.nulls(len(struct_array), type=arrow_type)) fields.append(self._construct_field(field, arrow_type)) else: raise ResolveError(f"Field is required, and could not be found in the file: {field}") @@ -1959,7 +1850,7 @@ def visit_fixed(self, fixed_type: FixedType) -> str: return "FIXED_LEN_BYTE_ARRAY" def visit_decimal(self, decimal_type: DecimalType) -> str: - return "INT32" if decimal_type.precision <= 9 else "INT64" if decimal_type.precision <= 18 else "FIXED_LEN_BYTE_ARRAY" + return "FIXED_LEN_BYTE_ARRAY" def visit_boolean(self, boolean_type: BooleanType) -> str: return "BOOLEAN" @@ -1985,15 +1876,9 @@ def visit_time(self, time_type: TimeType) -> str: def visit_timestamp(self, timestamp_type: TimestampType) -> str: return "INT64" - def visit_timestamp_ns(self, timestamp_type: TimestampNanoType) -> str: - return "INT64" - def visit_timestamptz(self, timestamptz_type: TimestamptzType) -> str: return "INT64" - def visit_timestamptz_ns(self, timestamptz_ns_type: TimestamptzNanoType) -> str: - return "INT64" - def visit_string(self, string_type: StringType) -> str: return "BYTE_ARRAY" @@ -2003,9 +1888,6 @@ def visit_uuid(self, uuid_type: UUIDType) -> str: def visit_binary(self, binary_type: BinaryType) -> str: return "BYTE_ARRAY" - def visit_unknown(self, unknown_type: UnknownType) -> str: - return "UNKNOWN" - _PRIMITIVE_TO_PHYSICAL_TYPE_VISITOR = PrimitiveToPhysicalType() @@ -2281,7 +2163,7 @@ def parquet_path_to_id_mapping( Compute the mapping of parquet column path to Iceberg ID. For each column, the parquet file metadata has a path_in_schema attribute that follows - a specific naming scheme for nested columns. 
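The docstring above leans on Parquet's `path_in_schema` convention, where each leaf of a nested column is addressed by a dotted path. A tiny standalone sketch of what those paths look like (the temporary file location is made up):

    import pyarrow as pa
    import pyarrow.parquet as pq

    tbl = pa.table({"point": [{"x": 1, "y": 2}, {"x": 3, "y": 4}]})
    pq.write_table(tbl, "/tmp/nested.parquet")

    row_group = pq.read_metadata("/tmp/nested.parquet").row_group(0)
    print([row_group.column(i).path_in_schema for i in range(row_group.num_columns)])
    # -> ['point.x', 'point.y']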
This function computes a mapping of + a specific naming scheme for nested columnds. This function computes a mapping of the full paths to the corresponding Iceberg IDs. Args: @@ -2307,39 +2189,32 @@ def _partition_value(self, partition_field: PartitionField, schema: Schema) -> A if partition_field.source_id not in self.column_aggregates: return None - source_field = schema.find_field(partition_field.source_id) - iceberg_transform = partition_field.transform - - if not iceberg_transform.preserves_order: + if not partition_field.transform.preserves_order: raise ValueError( f"Cannot infer partition value from parquet metadata for a non-linear Partition Field: {partition_field.name} with transform {partition_field.transform}" ) - transform_func = iceberg_transform.transform(source_field.field_type) - - lower_value = transform_func( - partition_record_value( - partition_field=partition_field, - value=self.column_aggregates[partition_field.source_id].current_min, - schema=schema, - ) + lower_value = partition_record_value( + partition_field=partition_field, + value=self.column_aggregates[partition_field.source_id].current_min, + schema=schema, ) - upper_value = transform_func( - partition_record_value( - partition_field=partition_field, - value=self.column_aggregates[partition_field.source_id].current_max, - schema=schema, - ) + upper_value = partition_record_value( + partition_field=partition_field, + value=self.column_aggregates[partition_field.source_id].current_max, + schema=schema, ) if lower_value != upper_value: raise ValueError( f"Cannot infer partition value from parquet metadata as there are more than one partition values for Partition Field: {partition_field.name}. {lower_value=}, {upper_value=}" ) - return lower_value + source_field = schema.find_field(partition_field.source_id) + transform = partition_field.transform.transform(source_field.field_type) + return transform(lower_value) def partition(self, partition_spec: PartitionSpec, schema: Schema) -> Record: - return Record(*[self._partition_value(field, schema) for field in partition_spec.fields]) + return Record(**{field.name: self._partition_value(field, schema) for field in partition_spec.fields}) def to_serialized_dict(self) -> Dict[str, Any]: lower_bounds = {} @@ -2436,20 +2311,12 @@ def data_file_statistics_from_parquet_metadata( continue if field_id not in col_aggs: - try: - col_aggs[field_id] = StatsAggregator( - stats_col.iceberg_type, statistics.physical_type, stats_col.mode.length - ) - except ValueError as e: - raise ValueError(f"{e} for column '{stats_col.column_name}'") from e - - if isinstance(stats_col.iceberg_type, DecimalType) and statistics.physical_type != "FIXED_LEN_BYTE_ARRAY": - scale = stats_col.iceberg_type.scale - col_aggs[field_id].update_min(unscaled_to_decimal(statistics.min_raw, scale)) - col_aggs[field_id].update_max(unscaled_to_decimal(statistics.max_raw, scale)) - else: - col_aggs[field_id].update_min(statistics.min) - col_aggs[field_id].update_max(statistics.max) + col_aggs[field_id] = StatsAggregator( + stats_col.iceberg_type, statistics.physical_type, stats_col.mode.length + ) + + col_aggs[field_id].update_min(statistics.min) + col_aggs[field_id].update_max(statistics.max) except pyarrow.lib.ArrowNotImplementedError as e: invalidate_col.add(field_id) @@ -2513,16 +2380,14 @@ def write_parquet(task: WriteTask) -> DataFile: ) fo = io.new_output(file_path) with fo.create(overwrite=True) as fos: - with pq.ParquetWriter( - fos, schema=arrow_table.schema, store_decimal_as_integer=True, 
**parquet_writer_kwargs - ) as writer: + with pq.ParquetWriter(fos, schema=arrow_table.schema, **parquet_writer_kwargs) as writer: writer.write(arrow_table, row_group_size=row_group_size) statistics = data_file_statistics_from_parquet_metadata( parquet_metadata=writer.writer.metadata, stats_columns=compute_statistics_plan(file_schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(file_schema), ) - data_file = DataFile.from_args( + data_file = DataFile( content=DataFileContent.DATA, file_path=file_path, file_format=FileFormat.PARQUET, @@ -2564,10 +2429,7 @@ def bin_pack_arrow_table(tbl: pa.Table, target_file_size: int) -> Iterator[List[ def _check_pyarrow_schema_compatible( - requested_schema: Schema, - provided_schema: pa.Schema, - downcast_ns_timestamp_to_us: bool = False, - format_version: TableVersion = TableProperties.DEFAULT_FORMAT_VERSION, + requested_schema: Schema, provided_schema: pa.Schema, downcast_ns_timestamp_to_us: bool = False ) -> None: """ Check if the `requested_schema` is compatible with `provided_schema`. @@ -2580,15 +2442,10 @@ def _check_pyarrow_schema_compatible( name_mapping = requested_schema.name_mapping try: provided_schema = pyarrow_to_schema( - provided_schema, - name_mapping=name_mapping, - downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, - format_version=format_version, + provided_schema, name_mapping=name_mapping, downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us ) except ValueError as e: - provided_schema = _pyarrow_to_schema_without_ids( - provided_schema, downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, format_version=format_version - ) + provided_schema = _pyarrow_to_schema_without_ids(provided_schema, downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us) additional_names = set(provided_schema._name_to_id.keys()) - set(requested_schema._name_to_id.keys()) raise ValueError( f"PyArrow table contains more columns: {', '.join(sorted(additional_names))}. Update the schema first (hint, use union_by_name)." @@ -2598,43 +2455,36 @@ def _check_pyarrow_schema_compatible( def parquet_files_to_data_files(io: FileIO, table_metadata: TableMetadata, file_paths: Iterator[str]) -> Iterator[DataFile]: for file_path in file_paths: - data_file = parquet_file_to_data_file(io=io, table_metadata=table_metadata, file_path=file_path) - yield data_file - + input_file = io.new_input(file_path) + with input_file.open() as input_stream: + parquet_metadata = pq.read_metadata(input_stream) -def parquet_file_to_data_file(io: FileIO, table_metadata: TableMetadata, file_path: str) -> DataFile: - input_file = io.new_input(file_path) - with input_file.open() as input_stream: - parquet_metadata = pq.read_metadata(input_stream) + if visit_pyarrow(parquet_metadata.schema.to_arrow_schema(), _HasIds()): + raise NotImplementedError( + f"Cannot add file {file_path} because it has field IDs. `add_files` only supports addition of files without field_ids" + ) + schema = table_metadata.schema() + _check_pyarrow_schema_compatible(schema, parquet_metadata.schema.to_arrow_schema()) - arrow_schema = parquet_metadata.schema.to_arrow_schema() - if visit_pyarrow(arrow_schema, _HasIds()): - raise NotImplementedError( - f"Cannot add file {file_path} because it has field IDs. 
`add_files` only supports addition of files without field_ids" + statistics = data_file_statistics_from_parquet_metadata( + parquet_metadata=parquet_metadata, + stats_columns=compute_statistics_plan(schema, table_metadata.properties), + parquet_column_mapping=parquet_path_to_id_mapping(schema), + ) + data_file = DataFile( + content=DataFileContent.DATA, + file_path=file_path, + file_format=FileFormat.PARQUET, + partition=statistics.partition(table_metadata.spec(), table_metadata.schema()), + file_size_in_bytes=len(input_file), + sort_order_id=None, + spec_id=table_metadata.default_spec_id, + equality_ids=None, + key_metadata=None, + **statistics.to_serialized_dict(), ) - schema = table_metadata.schema() - _check_pyarrow_schema_compatible(schema, arrow_schema, format_version=table_metadata.format_version) - - statistics = data_file_statistics_from_parquet_metadata( - parquet_metadata=parquet_metadata, - stats_columns=compute_statistics_plan(schema, table_metadata.properties), - parquet_column_mapping=parquet_path_to_id_mapping(schema), - ) - data_file = DataFile.from_args( - content=DataFileContent.DATA, - file_path=file_path, - file_format=FileFormat.PARQUET, - partition=statistics.partition(table_metadata.spec(), table_metadata.schema()), - file_size_in_bytes=len(input_file), - sort_order_id=None, - spec_id=table_metadata.default_spec_id, - equality_ids=None, - key_metadata=None, - **statistics.to_serialized_dict(), - ) - - return data_file + yield data_file ICEBERG_UNCOMPRESSED_CODEC = "uncompressed" @@ -2705,12 +2555,7 @@ def _dataframe_to_data_files( ) name_mapping = table_metadata.schema().name_mapping downcast_ns_timestamp_to_us = Config().get_bool(DOWNCAST_NS_TIMESTAMP_TO_US_ON_WRITE) or False - task_schema = pyarrow_to_schema( - df.schema, - name_mapping=name_mapping, - downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, - format_version=table_metadata.format_version, - ) + task_schema = pyarrow_to_schema(df.schema, name_mapping=name_mapping, downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us) if table_metadata.spec().is_unpartitioned(): yield from write_file( @@ -2770,11 +2615,9 @@ def _determine_partitions(spec: PartitionSpec, schema: Schema, arrow_table: pa.T for partition, name in zip(spec.fields, partition_fields): source_field = schema.find_field(partition.source_id) - full_field_name = schema.find_column_name(partition.source_id) - if full_field_name is None: - raise ValueError(f"Could not find column name for field ID: {partition.source_id}") - field_array = _get_field_from_arrow_table(arrow_table, full_field_name) - arrow_table = arrow_table.append_column(name, partition.transform.pyarrow_transform(source_field.field_type)(field_array)) + arrow_table = arrow_table.append_column( + name, partition.transform.pyarrow_transform(source_field.field_type)(arrow_table[source_field.name]) + ) unique_partition_fields = arrow_table.select(partition_fields).group_by(partition_fields).aggregate([]) @@ -2809,32 +2652,3 @@ def _determine_partitions(spec: PartitionSpec, schema: Schema, arrow_table: pa.T ) return table_partitions - - -def _get_field_from_arrow_table(arrow_table: pa.Table, field_path: str) -> pa.Array: - """Get a field from an Arrow table, supporting both literal field names and nested field paths. - - This function handles two cases: - 1. Literal field names that may contain dots (e.g., "some.id") - 2. 
Nested field paths using dot notation (e.g., "bar.baz" for nested access) - - Args: - arrow_table: The Arrow table containing the field - field_path: Field name or dot-separated path - - Returns: - The field as a PyArrow Array - - Raises: - KeyError: If the field path cannot be resolved - """ - # Try exact column name match (handles field names containing literal dots) - if field_path in arrow_table.column_names: - return arrow_table[field_path] - - # If not found as exact name, treat as nested field path - path_parts = field_path.split(".") - # Get the struct column from the table (e.g., "bar" from "bar.baz") - field_array = arrow_table[path_parts[0]] - # Navigate into the struct using the remaining path parts - return pc.struct_field(field_array, path_parts[1:]) diff --git a/pyiceberg/manifest.py b/pyiceberg/manifest.py index a92d944811..5a32a6330c 100644 --- a/pyiceberg/manifest.py +++ b/pyiceberg/manifest.py @@ -37,7 +37,6 @@ from cachetools.keys import hashkey from pydantic_core import to_json -from pyiceberg.avro.codecs import AVRO_CODEC_KEY, AvroCompressionCodec from pyiceberg.avro.file import AvroFile, AvroOutputFile from pyiceberg.conversions import to_bytes from pyiceberg.exceptions import ValidationError @@ -98,7 +97,6 @@ class FileFormat(str, Enum): AVRO = "AVRO" PARQUET = "PARQUET" ORC = "ORC" - PUFFIN = "PUFFIN" @classmethod def _missing_(cls, value: object) -> Union[None, str]: @@ -290,129 +288,6 @@ def __repr__(self) -> str: doc="ID representing sort order for this file", ), ), - 3: StructType( - NestedField( - field_id=134, - name="content", - field_type=IntegerType(), - required=True, - doc="File format name: avro, orc, or parquet", - initial_default=DataFileContent.DATA, - ), - NestedField(field_id=100, name="file_path", field_type=StringType(), required=True, doc="Location URI with FS scheme"), - NestedField( - field_id=101, - name="file_format", - field_type=StringType(), - required=True, - doc="File format name: avro, orc, or parquet", - ), - NestedField( - field_id=102, - name="partition", - field_type=StructType(), - required=True, - doc="Partition data tuple, schema based on the partition spec", - ), - NestedField(field_id=103, name="record_count", field_type=LongType(), required=True, doc="Number of records in the file"), - NestedField( - field_id=104, name="file_size_in_bytes", field_type=LongType(), required=True, doc="Total file size in bytes" - ), - NestedField( - field_id=108, - name="column_sizes", - field_type=MapType(key_id=117, key_type=IntegerType(), value_id=118, value_type=LongType()), - required=False, - doc="Map of column id to total size on disk", - ), - NestedField( - field_id=109, - name="value_counts", - field_type=MapType(key_id=119, key_type=IntegerType(), value_id=120, value_type=LongType()), - required=False, - doc="Map of column id to total count, including null and NaN", - ), - NestedField( - field_id=110, - name="null_value_counts", - field_type=MapType(key_id=121, key_type=IntegerType(), value_id=122, value_type=LongType()), - required=False, - doc="Map of column id to null value count", - ), - NestedField( - field_id=137, - name="nan_value_counts", - field_type=MapType(key_id=138, key_type=IntegerType(), value_id=139, value_type=LongType()), - required=False, - doc="Map of column id to number of NaN values in the column", - ), - NestedField( - field_id=125, - name="lower_bounds", - field_type=MapType(key_id=126, key_type=IntegerType(), value_id=127, value_type=BinaryType()), - required=False, - doc="Map of column id to lower bound", - ), 
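As the field docs immediately above state, `lower_bounds` and `upper_bounds` are maps from column id to a serialized bound. A hedged sketch of how such entries could be built with the `to_bytes` helper this module already imports; the field ids and values are invented for illustration:

    from pyiceberg.conversions import to_bytes
    from pyiceberg.types import IntegerType, StringType

    # column id -> bound value encoded as bytes (single-value serialization per type)
    lower_bounds = {1: to_bytes(IntegerType(), 5), 2: to_bytes(StringType(), "aaa")}
    upper_bounds = {1: to_bytes(IntegerType(), 9), 2: to_bytes(StringType(), "zzz")}
    print(lower_bounds, upper_bounds)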
- NestedField( - field_id=128, - name="upper_bounds", - field_type=MapType(key_id=129, key_type=IntegerType(), value_id=130, value_type=BinaryType()), - required=False, - doc="Map of column id to upper bound", - ), - NestedField( - field_id=131, name="key_metadata", field_type=BinaryType(), required=False, doc="Encryption key metadata blob" - ), - NestedField( - field_id=132, - name="split_offsets", - field_type=ListType(element_id=133, element_type=LongType(), element_required=True), - required=False, - doc="Splittable offsets", - ), - NestedField( - field_id=135, - name="equality_ids", - field_type=ListType(element_id=136, element_type=LongType(), element_required=True), - required=False, - doc="Field ids used to determine row equality in equality delete files.", - ), - NestedField( - field_id=140, - name="sort_order_id", - field_type=IntegerType(), - required=False, - doc="ID representing sort order for this file", - ), - NestedField( - field_id=142, - name="first_row_id", - field_type=LongType(), - required=False, - doc="The _row_id for the first row in the data file.", - ), - NestedField( - field_id=143, - name="referenced_data_file", - field_type=StringType(), - required=False, - doc="Fully qualified location (URI with FS scheme) of a data file that all deletes reference", - ), - NestedField( - field_id=144, - name="content_offset", - field_type=LongType(), - required=False, - doc="The offset in the file where the content starts.", - ), - NestedField( - field_id=145, - name="content_size_in_bytes", - field_type=LongType(), - required=False, - doc="The length of a referenced content stored in the file; required if content_offset is present", - ), - ), } @@ -446,85 +321,42 @@ def data_file_with_partition(partition_type: StructType, format_version: TableVe class DataFile(Record): - @classmethod - def from_args(cls, _table_format_version: TableVersion = DEFAULT_READ_VERSION, **arguments: Any) -> DataFile: - struct = DATA_FILE_TYPE[_table_format_version] - return super()._bind(struct, **arguments) - - @property - def content(self) -> DataFileContent: - return self._data[0] - - @property - def file_path(self) -> str: - return self._data[1] - - @property - def file_format(self) -> FileFormat: - return self._data[2] - - @property - def partition(self) -> Record: - return self._data[3] - - @property - def record_count(self) -> int: - return self._data[4] - - @property - def file_size_in_bytes(self) -> int: - return self._data[5] - - @property - def column_sizes(self) -> Dict[int, int]: - return self._data[6] - - @property - def value_counts(self) -> Dict[int, int]: - return self._data[7] - - @property - def null_value_counts(self) -> Dict[int, int]: - return self._data[8] - - @property - def nan_value_counts(self) -> Dict[int, int]: - return self._data[9] - - @property - def lower_bounds(self) -> Dict[int, bytes]: - return self._data[10] - - @property - def upper_bounds(self) -> Dict[int, bytes]: - return self._data[11] - - @property - def key_metadata(self) -> Optional[bytes]: - return self._data[12] - - @property - def split_offsets(self) -> Optional[List[int]]: - return self._data[13] - - @property - def equality_ids(self) -> Optional[List[int]]: - return self._data[14] - - @property - def sort_order_id(self) -> Optional[int]: - return self._data[15] - - # Spec ID should not be stored in the file - _spec_id: int - - @property - def spec_id(self) -> int: - return self._spec_id - - @spec_id.setter - def spec_id(self, value: int) -> None: - self._spec_id = value + __slots__ = ( + "content", + 
"file_path", + "file_format", + "partition", + "record_count", + "file_size_in_bytes", + "column_sizes", + "value_counts", + "null_value_counts", + "nan_value_counts", + "lower_bounds", + "upper_bounds", + "key_metadata", + "split_offsets", + "equality_ids", + "sort_order_id", + "spec_id", + ) + content: DataFileContent + file_path: str + file_format: FileFormat + partition: Record + record_count: int + file_size_in_bytes: int + column_sizes: Dict[int, int] + value_counts: Dict[int, int] + null_value_counts: Dict[int, int] + nan_value_counts: Dict[int, int] + lower_bounds: Dict[int, bytes] + upper_bounds: Dict[int, bytes] + key_metadata: Optional[bytes] + split_offsets: Optional[List[int]] + equality_ids: Optional[List[int]] + sort_order_id: Optional[int] + spec_id: int def __setattr__(self, name: str, value: Any) -> None: """Assign a key/value to a DataFile.""" @@ -533,6 +365,12 @@ def __setattr__(self, name: str, value: Any) -> None: value = FileFormat[value] super().__setattr__(name, value) + def __init__(self, format_version: TableVersion = DEFAULT_READ_VERSION, *data: Any, **named_data: Any) -> None: + super().__init__( + *data, + **{"struct": DATA_FILE_TYPE[format_version], **named_data}, + ) + def __hash__(self) -> int: """Return the hash of the file path.""" return hash(self.file_path) @@ -558,13 +396,6 @@ def __eq__(self, other: Any) -> bool: NestedField(4, "file_sequence_number", LongType(), required=False), NestedField(2, "data_file", DATA_FILE_TYPE[2], required=True), ), - 3: Schema( - NestedField(0, "status", IntegerType(), required=True), - NestedField(1, "snapshot_id", LongType(), required=False), - NestedField(3, "sequence_number", LongType(), required=False), - NestedField(4, "file_sequence_number", LongType(), required=False), - NestedField(2, "data_file", DATA_FILE_TYPE[3], required=True), - ), } MANIFEST_ENTRY_SCHEMAS_STRUCT = {format_version: schema.as_struct() for format_version, schema in MANIFEST_ENTRY_SCHEMAS.items()} @@ -580,49 +411,53 @@ def manifest_entry_schema_with_data_file(format_version: TableVersion, data_file class ManifestEntry(Record): - @classmethod - def from_args(cls, _table_format_version: TableVersion = DEFAULT_READ_VERSION, **arguments: Any) -> ManifestEntry: - return super()._bind(**arguments, struct=MANIFEST_ENTRY_SCHEMAS_STRUCT[_table_format_version]) + __slots__ = ("status", "snapshot_id", "sequence_number", "file_sequence_number", "data_file") + status: ManifestEntryStatus + snapshot_id: Optional[int] + sequence_number: Optional[int] + file_sequence_number: Optional[int] + data_file: DataFile - @property - def status(self) -> ManifestEntryStatus: - return self._data[0] - - @status.setter - def status(self, value: ManifestEntryStatus) -> None: - self._data[0] = value - - @property - def snapshot_id(self) -> Optional[int]: - return self._data[1] - - @snapshot_id.setter - def snapshot_id(self, value: int) -> None: - self._data[0] = value - - @property - def sequence_number(self) -> Optional[int]: - return self._data[2] - - @sequence_number.setter - def sequence_number(self, value: int) -> None: - self._data[2] = value + def __init__(self, *data: Any, **named_data: Any) -> None: + super().__init__(*data, **{"struct": MANIFEST_ENTRY_SCHEMAS_STRUCT[DEFAULT_READ_VERSION], **named_data}) - @property - def file_sequence_number(self) -> Optional[int]: - return self._data[3] - - @file_sequence_number.setter - def file_sequence_number(self, value: int) -> None: - self._data[3] = value + def _wrap( + self, + new_status: ManifestEntryStatus, + 
new_snapshot_id: Optional[int], + new_sequence_number: Optional[int], + new_file_sequence_number: Optional[int], + new_file: DataFile, + ) -> ManifestEntry: + self.status = new_status + self.snapshot_id = new_snapshot_id + self.sequence_number = new_sequence_number + self.file_sequence_number = new_file_sequence_number + self.data_file = new_file + return self - @property - def data_file(self) -> DataFile: - return self._data[4] + def _wrap_append( + self, new_snapshot_id: Optional[int], new_sequence_number: Optional[int], new_file: DataFile + ) -> ManifestEntry: + return self._wrap(ManifestEntryStatus.ADDED, new_snapshot_id, new_sequence_number, None, new_file) - @data_file.setter - def data_file(self, value: DataFile) -> None: - self._data[4] = value + def _wrap_delete( + self, + new_snapshot_id: Optional[int], + new_sequence_number: Optional[int], + new_file_sequence_number: Optional[int], + new_file: DataFile, + ) -> ManifestEntry: + return self._wrap(ManifestEntryStatus.DELETED, new_snapshot_id, new_sequence_number, new_file_sequence_number, new_file) + + def _wrap_existing( + self, + new_snapshot_id: Optional[int], + new_sequence_number: Optional[int], + new_file_sequence_number: Optional[int], + new_file: DataFile, + ) -> ManifestEntry: + return self._wrap(ManifestEntryStatus.EXISTING, new_snapshot_id, new_sequence_number, new_file_sequence_number, new_file) PARTITION_FIELD_SUMMARY_TYPE = StructType( @@ -634,25 +469,14 @@ def data_file(self, value: DataFile) -> None: class PartitionFieldSummary(Record): - @classmethod - def from_args(cls, **arguments: Any) -> PartitionFieldSummary: - return super()._bind(**arguments, struct=PARTITION_FIELD_SUMMARY_TYPE) + __slots__ = ("contains_null", "contains_nan", "lower_bound", "upper_bound") + contains_null: bool + contains_nan: Optional[bool] + lower_bound: Optional[bytes] + upper_bound: Optional[bytes] - @property - def contains_null(self) -> bool: - return self._data[0] - - @property - def contains_nan(self) -> Optional[bool]: - return self._data[1] - - @property - def lower_bound(self) -> Optional[bytes]: - return self._data[2] - - @property - def upper_bound(self) -> Optional[bytes]: - return self._data[3] + def __init__(self, *data: Any, **named_data: Any) -> None: + super().__init__(*data, **{"struct": PARTITION_FIELD_SUMMARY_TYPE, **named_data}) class PartitionFieldStats: @@ -671,10 +495,10 @@ def __init__(self, iceberg_type: PrimitiveType) -> None: def to_summary(self) -> PartitionFieldSummary: return PartitionFieldSummary( - self._contains_null, - self._contains_nan, - to_bytes(self._type, self._min) if self._min is not None else None, - to_bytes(self._type, self._max) if self._max is not None else None, + contains_null=self._contains_null, + contains_nan=self._contains_nan, + lower_bound=to_bytes(self._type, self._min) if self._min is not None else None, + upper_bound=to_bytes(self._type, self._max) if self._max is not None else None, ) def update(self, value: Any) -> None: @@ -735,24 +559,6 @@ def construct_partition_summaries(spec: PartitionSpec, schema: Schema, partition NestedField(507, "partitions", ListType(508, PARTITION_FIELD_SUMMARY_TYPE, element_required=True), required=False), NestedField(519, "key_metadata", BinaryType(), required=False), ), - 3: Schema( - NestedField(500, "manifest_path", StringType(), required=True, doc="Location URI with FS scheme"), - NestedField(501, "manifest_length", LongType(), required=True), - NestedField(502, "partition_spec_id", IntegerType(), required=True), - NestedField(517, "content", 
IntegerType(), required=True, initial_default=ManifestContent.DATA), - NestedField(515, "sequence_number", LongType(), required=True, initial_default=0), - NestedField(516, "min_sequence_number", LongType(), required=True, initial_default=0), - NestedField(503, "added_snapshot_id", LongType(), required=True), - NestedField(504, "added_files_count", IntegerType(), required=True), - NestedField(505, "existing_files_count", IntegerType(), required=True), - NestedField(506, "deleted_files_count", IntegerType(), required=True), - NestedField(512, "added_rows_count", LongType(), required=True), - NestedField(513, "existing_rows_count", LongType(), required=True), - NestedField(514, "deleted_rows_count", LongType(), required=True), - NestedField(507, "partitions", ListType(508, PARTITION_FIELD_SUMMARY_TYPE, element_required=True), required=False), - NestedField(519, "key_metadata", BinaryType(), required=False), - NestedField(520, "first_row_id", LongType(), required=False), - ), } MANIFEST_LIST_FILE_STRUCTS = {format_version: schema.as_struct() for format_version, schema in MANIFEST_LIST_FILE_SCHEMAS.items()} @@ -764,77 +570,41 @@ def construct_partition_summaries(spec: PartitionSpec, schema: Schema, partition class ManifestFile(Record): - @classmethod - def from_args(cls, _table_format_version: TableVersion = DEFAULT_READ_VERSION, **arguments: Any) -> ManifestFile: - return super()._bind(**arguments, struct=MANIFEST_LIST_FILE_SCHEMAS[_table_format_version]) - - @property - def manifest_path(self) -> str: - return self._data[0] - - @property - def manifest_length(self) -> int: - return self._data[1] - - @property - def partition_spec_id(self) -> int: - return self._data[2] - - @property - def content(self) -> ManifestContent: - return self._data[3] - - @property - def sequence_number(self) -> int: - return self._data[4] - - @sequence_number.setter - def sequence_number(self, value: int) -> None: - self._data[4] = value - - @property - def min_sequence_number(self) -> int: - return self._data[5] - - @min_sequence_number.setter - def min_sequence_number(self, value: int) -> None: - self._data[5] = value - - @property - def added_snapshot_id(self) -> Optional[int]: - return self._data[6] - - @property - def added_files_count(self) -> Optional[int]: - return self._data[7] - - @property - def existing_files_count(self) -> Optional[int]: - return self._data[8] - - @property - def deleted_files_count(self) -> Optional[int]: - return self._data[9] - - @property - def added_rows_count(self) -> Optional[int]: - return self._data[10] - - @property - def existing_rows_count(self) -> Optional[int]: - return self._data[11] - - @property - def deleted_rows_count(self) -> Optional[int]: - return self._data[12] - - @property - def partitions(self) -> Optional[List[PartitionFieldSummary]]: - return self._data[13] - - @property - def key_metadata(self) -> Optional[bytes]: - return self._data[14] + __slots__ = ( + "manifest_path", + "manifest_length", + "partition_spec_id", + "content", + "sequence_number", + "min_sequence_number", + "added_snapshot_id", + "added_files_count", + "existing_files_count", + "deleted_files_count", + "added_rows_count", + "existing_rows_count", + "deleted_rows_count", + "partitions", + "key_metadata", + ) + manifest_path: str + manifest_length: int + partition_spec_id: int + content: ManifestContent + sequence_number: int + min_sequence_number: int + added_snapshot_id: int + added_files_count: Optional[int] + existing_files_count: Optional[int] + deleted_files_count: Optional[int] + 
added_rows_count: Optional[int] + existing_rows_count: Optional[int] + deleted_rows_count: Optional[int] + partitions: Optional[List[PartitionFieldSummary]] + key_metadata: Optional[bytes] + + def __init__(self, *data: Any, **named_data: Any) -> None: + super().__init__(*data, **{"struct": MANIFEST_LIST_FILE_STRUCTS[DEFAULT_READ_VERSION], **named_data}) def has_added_files(self) -> bool: return self.added_files_count is None or self.added_files_count > 0 @@ -866,14 +636,6 @@ def fetch_manifest_entry(self, io: FileIO, discard_deleted: bool = True) -> List if not discard_deleted or entry.status != ManifestEntryStatus.DELETED ] - def __eq__(self, other: Any) -> bool: - """Return the equality of two instances of the ManifestFile class.""" - return self.manifest_path == other.manifest_path if isinstance(other, ManifestFile) else False - - def __hash__(self) -> int: - """Return the hash of manifest_path.""" - return hash(self.manifest_path) - @cached(cache=LRUCache(maxsize=128), key=lambda io, manifest_list: hashkey(manifest_list)) def _manifests(io: FileIO, manifest_list: str) -> Tuple[ManifestFile, ...]: @@ -920,7 +682,7 @@ def _inherit_from_manifest(entry: ManifestEntry, manifest: ManifestFile) -> Mani """ # Inherit sequence numbers. # The snapshot_id is required in V1, inherit with V2 when null - if entry.snapshot_id is None and manifest.added_snapshot_id is not None: + if entry.snapshot_id is None: entry.snapshot_id = manifest.added_snapshot_id # in v1 tables, the sequence number is not persisted and can be safely defaulted to 0 @@ -955,16 +717,9 @@ class ManifestWriter(ABC): _deleted_rows: int _min_sequence_number: Optional[int] _partitions: List[Record] - _compression: AvroCompressionCodec + _reused_entry_wrapper: ManifestEntry - def __init__( - self, - spec: PartitionSpec, - schema: Schema, - output_file: OutputFile, - snapshot_id: int, - avro_compression: AvroCompressionCodec, - ) -> None: + def __init__(self, spec: PartitionSpec, schema: Schema, output_file: OutputFile, snapshot_id: int) -> None: self.closed = False self._spec = spec self._schema = schema @@ -979,7 +734,7 @@ def __init__( self._deleted_rows = 0 self._min_sequence_number = None self._partitions = [] - self._compression = avro_compression + self._reused_entry_wrapper = ManifestEntry() def __enter__(self) -> ManifestWriter: """Open the writer.""" @@ -1015,7 +770,6 @@ def _meta(self) -> Dict[str, str]: "partition-spec": to_json(self._spec.fields).decode("utf-8"), "partition-spec-id": str(self._spec.spec_id), "format-version": str(self.version), - AVRO_CODEC_KEY: self._compression, } def _with_partition(self, format_version: TableVersion) -> Schema: @@ -1041,7 +795,7 @@ def to_manifest_file(self) -> ManifestFile: # once the manifest file is generated, no more entries can be added self.closed = True min_sequence_number = self._min_sequence_number or UNASSIGNED_SEQ - return ManifestFile.from_args( + return ManifestFile( manifest_path=self._output_file.location, manifest_length=len(self._writer.output_file), partition_spec_id=self._spec.spec_id, @@ -1088,54 +842,36 @@ def add_entry(self, entry: ManifestEntry) -> ManifestWriter: def add(self, entry: ManifestEntry) -> ManifestWriter: if entry.sequence_number is not None and entry.sequence_number >= 0: - self.add_entry( - ManifestEntry.from_args( - snapshot_id=self._snapshot_id, sequence_number=entry.sequence_number, data_file=entry.data_file - ) - ) + self.add_entry(self._reused_entry_wrapper._wrap_append(self._snapshot_id, entry.sequence_number, entry.data_file)) else: - 
self.add_entry( - ManifestEntry.from_args( - status=ManifestEntryStatus.ADDED, snapshot_id=self._snapshot_id, data_file=entry.data_file - ) - ) + self.add_entry(self._reused_entry_wrapper._wrap_append(self._snapshot_id, None, entry.data_file)) return self def delete(self, entry: ManifestEntry) -> ManifestWriter: self.add_entry( - ManifestEntry.from_args( - status=ManifestEntryStatus.DELETED, - snapshot_id=self._snapshot_id, - sequence_number=entry.sequence_number, - file_sequence_number=entry.file_sequence_number, - data_file=entry.data_file, + self._reused_entry_wrapper._wrap_delete( + self._snapshot_id, entry.sequence_number, entry.file_sequence_number, entry.data_file ) ) return self def existing(self, entry: ManifestEntry) -> ManifestWriter: self.add_entry( - ManifestEntry.from_args( - status=ManifestEntryStatus.EXISTING, - snapshot_id=entry.snapshot_id, - sequence_number=entry.sequence_number, - file_sequence_number=entry.file_sequence_number, - data_file=entry.data_file, + self._reused_entry_wrapper._wrap_existing( + entry.snapshot_id, entry.sequence_number, entry.file_sequence_number, entry.data_file ) ) return self class ManifestWriterV1(ManifestWriter): - def __init__( - self, - spec: PartitionSpec, - schema: Schema, - output_file: OutputFile, - snapshot_id: int, - avro_compression: AvroCompressionCodec, - ): - super().__init__(spec, schema, output_file, snapshot_id, avro_compression) + def __init__(self, spec: PartitionSpec, schema: Schema, output_file: OutputFile, snapshot_id: int): + super().__init__( + spec, + schema, + output_file, + snapshot_id, + ) def content(self) -> ManifestContent: return ManifestContent.DATA @@ -1149,15 +885,8 @@ def prepare_entry(self, entry: ManifestEntry) -> ManifestEntry: class ManifestWriterV2(ManifestWriter): - def __init__( - self, - spec: PartitionSpec, - schema: Schema, - output_file: OutputFile, - snapshot_id: int, - avro_compression: AvroCompressionCodec, - ): - super().__init__(spec, schema, output_file, snapshot_id, avro_compression) + def __init__(self, spec: PartitionSpec, schema: Schema, output_file: OutputFile, snapshot_id: int): + super().__init__(spec, schema, output_file, snapshot_id) def content(self) -> ManifestContent: return ManifestContent.DATA @@ -1183,17 +912,12 @@ def prepare_entry(self, entry: ManifestEntry) -> ManifestEntry: def write_manifest( - format_version: TableVersion, - spec: PartitionSpec, - schema: Schema, - output_file: OutputFile, - snapshot_id: int, - avro_compression: AvroCompressionCodec, + format_version: TableVersion, spec: PartitionSpec, schema: Schema, output_file: OutputFile, snapshot_id: int ) -> ManifestWriter: if format_version == 1: - return ManifestWriterV1(spec, schema, output_file, snapshot_id, avro_compression) + return ManifestWriterV1(spec, schema, output_file, snapshot_id) elif format_version == 2: - return ManifestWriterV2(spec, schema, output_file, snapshot_id, avro_compression) + return ManifestWriterV2(spec, schema, output_file, snapshot_id) else: raise ValueError(f"Cannot write manifest for table version: {format_version}") @@ -1243,13 +967,7 @@ def add_manifests(self, manifest_files: List[ManifestFile]) -> ManifestListWrite class ManifestListWriterV1(ManifestListWriter): - def __init__( - self, - output_file: OutputFile, - snapshot_id: int, - parent_snapshot_id: Optional[int], - compression: AvroCompressionCodec, - ): + def __init__(self, output_file: OutputFile, snapshot_id: int, parent_snapshot_id: Optional[int]): super().__init__( format_version=1, output_file=output_file, @@ -1257,7 
+975,6 @@ def __init__( "snapshot-id": str(snapshot_id), "parent-snapshot-id": str(parent_snapshot_id) if parent_snapshot_id is not None else "null", "format-version": "1", - AVRO_CODEC_KEY: compression, }, ) @@ -1271,14 +988,7 @@ class ManifestListWriterV2(ManifestListWriter): _commit_snapshot_id: int _sequence_number: int - def __init__( - self, - output_file: OutputFile, - snapshot_id: int, - parent_snapshot_id: Optional[int], - sequence_number: int, - compression: AvroCompressionCodec, - ): + def __init__(self, output_file: OutputFile, snapshot_id: int, parent_snapshot_id: Optional[int], sequence_number: int): super().__init__( format_version=2, output_file=output_file, @@ -1287,7 +997,6 @@ def __init__( "parent-snapshot-id": str(parent_snapshot_id) if parent_snapshot_id is not None else "null", "sequence-number": str(sequence_number), "format-version": "2", - AVRO_CODEC_KEY: compression, }, ) self._commit_snapshot_id = snapshot_id @@ -1322,13 +1031,12 @@ def write_manifest_list( snapshot_id: int, parent_snapshot_id: Optional[int], sequence_number: Optional[int], - avro_compression: AvroCompressionCodec, ) -> ManifestListWriter: if format_version == 1: - return ManifestListWriterV1(output_file, snapshot_id, parent_snapshot_id, avro_compression) + return ManifestListWriterV1(output_file, snapshot_id, parent_snapshot_id) elif format_version == 2: if sequence_number is None: raise ValueError(f"Sequence-number is required for V2 tables: {sequence_number}") - return ManifestListWriterV2(output_file, snapshot_id, parent_snapshot_id, sequence_number, avro_compression) + return ManifestListWriterV2(output_file, snapshot_id, parent_snapshot_id, sequence_number) else: raise ValueError(f"Cannot write manifest list for table version: {format_version}") diff --git a/pyiceberg/partitioning.py b/pyiceberg/partitioning.py index dd707cea14..2bed2ce899 100644 --- a/pyiceberg/partitioning.py +++ b/pyiceberg/partitioning.py @@ -390,20 +390,18 @@ class PartitionKey: @cached_property def partition(self) -> Record: # partition key transformed with iceberg internal representation as input - iceberg_typed_key_values = [] + iceberg_typed_key_values = {} for raw_partition_field_value in self.field_values: partition_fields = self.partition_spec.source_id_to_fields_map[raw_partition_field_value.field.source_id] if len(partition_fields) != 1: raise ValueError(f"Cannot have redundant partitions: {partition_fields}") partition_field = partition_fields[0] - iceberg_typed_key_values.append( - partition_record_value( - partition_field=partition_field, - value=raw_partition_field_value.value, - schema=self.schema, - ) + iceberg_typed_key_values[partition_field.name] = partition_record_value( + partition_field=partition_field, + value=raw_partition_field_value.value, + schema=self.schema, ) - return Record(*iceberg_typed_key_values) + return Record(**iceberg_typed_key_values) def to_path(self) -> str: return self.partition_spec.partition_to_path(self.partition, self.schema) @@ -446,7 +444,7 @@ def _(type: IcebergType, value: Optional[Union[int, datetime]]) -> Optional[int] elif isinstance(value, datetime): return datetime_to_micros(value) else: - raise ValueError(f"Type not recognized: {value}") + raise ValueError(f"Unknown type: {value}") @_to_partition_representation.register(DateType) @@ -458,7 +456,7 @@ def _(type: IcebergType, value: Optional[Union[int, date]]) -> Optional[int]: elif isinstance(value, date): return date_to_days(value) else: - raise ValueError(f"Type not recognized: {value}") + raise 
ValueError(f"Unknown type: {value}") @_to_partition_representation.register(TimeType) @@ -467,17 +465,8 @@ def _(type: IcebergType, value: Optional[time]) -> Optional[int]: @_to_partition_representation.register(UUIDType) -def _(type: IcebergType, value: Optional[Union[uuid.UUID, int, bytes]]) -> Optional[Union[bytes, int]]: - if value is None: - return None - elif isinstance(value, bytes): - return value # IdentityTransform - elif isinstance(value, uuid.UUID): - return value.bytes # IdentityTransform - elif isinstance(value, int): - return value # BucketTransform - else: - raise ValueError(f"Type not recognized: {value}") +def _(type: IcebergType, value: Optional[uuid.UUID]) -> Optional[str]: + return str(value) if value is not None else None @_to_partition_representation.register(PrimitiveType) diff --git a/pyiceberg/schema.py b/pyiceberg/schema.py index 1eadc58361..5a373cb15f 100644 --- a/pyiceberg/schema.py +++ b/pyiceberg/schema.py @@ -57,12 +57,9 @@ PrimitiveType, StringType, StructType, - TimestampNanoType, TimestampType, - TimestamptzNanoType, TimestamptzType, TimeType, - UnknownType, UUIDType, ) @@ -78,9 +75,6 @@ INITIAL_SCHEMA_ID = 0 -FIELD_ID_PROP = "field-id" -ICEBERG_FIELD_NAME_PROP = "iceberg-field-name" - class Schema(IcebergBaseModel): """A table Schema. @@ -367,21 +361,6 @@ def _validate_identifier_field(self, field_id: int) -> None: f"Cannot add field {field.name} as an identifier field: must not be nested in an optional field {parent}" ) - def check_format_version_compatibility(self, format_version: int) -> None: - """Check that the schema is compatible for the given table format version. - - Args: - format_version: The Iceberg table format version. - - Raises: - ValueError: If the schema is not compatible for the format version. - """ - for field in self._lazy_id_to_field.values(): - if format_version < field.field_type.minimum_format_version(): - raise ValueError( - f"{field.field_type} is only supported in {field.field_type.minimum_format_version()} or higher. 
Current format version is: {format_version}" - ) - class SchemaVisitor(Generic[T], ABC): def before_field(self, field: NestedField) -> None: @@ -542,12 +521,8 @@ def primitive(self, primitive: PrimitiveType, primitive_partner: Optional[P]) -> return self.visit_time(primitive, primitive_partner) elif isinstance(primitive, TimestampType): return self.visit_timestamp(primitive, primitive_partner) - elif isinstance(primitive, TimestampNanoType): - return self.visit_timestamp_ns(primitive, primitive_partner) elif isinstance(primitive, TimestamptzType): return self.visit_timestamptz(primitive, primitive_partner) - elif isinstance(primitive, TimestamptzNanoType): - return self.visit_timestamptz_ns(primitive, primitive_partner) elif isinstance(primitive, StringType): return self.visit_string(primitive, primitive_partner) elif isinstance(primitive, UUIDType): @@ -556,10 +531,8 @@ def primitive(self, primitive: PrimitiveType, primitive_partner: Optional[P]) -> return self.visit_fixed(primitive, primitive_partner) elif isinstance(primitive, BinaryType): return self.visit_binary(primitive, primitive_partner) - elif isinstance(primitive, UnknownType): - return self.visit_unknown(primitive, primitive_partner) else: - raise ValueError(f"Type not recognized: {primitive}") + raise ValueError(f"Unknown type: {primitive}") @abstractmethod def visit_boolean(self, boolean_type: BooleanType, partner: Optional[P]) -> T: @@ -597,18 +570,10 @@ def visit_time(self, time_type: TimeType, partner: Optional[P]) -> T: def visit_timestamp(self, timestamp_type: TimestampType, partner: Optional[P]) -> T: """Visit a TimestampType.""" - @abstractmethod - def visit_timestamp_ns(self, timestamp_ns_type: TimestampNanoType, partner: Optional[P]) -> T: - """Visit a TimestampNanoType.""" - @abstractmethod def visit_timestamptz(self, timestamptz_type: TimestamptzType, partner: Optional[P]) -> T: """Visit a TimestamptzType.""" - @abstractmethod - def visit_timestamptz_ns(self, timestamptz_ns_type: TimestamptzNanoType, partner: Optional[P]) -> T: - """Visit a TimestamptzNanoType.""" - @abstractmethod def visit_string(self, string_type: StringType, partner: Optional[P]) -> T: """Visit a StringType.""" @@ -625,10 +590,6 @@ def visit_fixed(self, fixed_type: FixedType, partner: Optional[P]) -> T: def visit_binary(self, binary_type: BinaryType, partner: Optional[P]) -> T: """Visit a BinaryType.""" - @abstractmethod - def visit_unknown(self, unknown_type: UnknownType, partner: Optional[P]) -> T: - """Visit a UnknownType.""" - class PartnerAccessor(Generic[P], ABC): @abstractmethod @@ -738,22 +699,16 @@ def primitive(self, primitive: PrimitiveType) -> T: return self.visit_time(primitive) elif isinstance(primitive, TimestampType): return self.visit_timestamp(primitive) - elif isinstance(primitive, TimestampNanoType): - return self.visit_timestamp_ns(primitive) elif isinstance(primitive, TimestamptzType): return self.visit_timestamptz(primitive) - elif isinstance(primitive, TimestamptzNanoType): - return self.visit_timestamptz_ns(primitive) elif isinstance(primitive, StringType): return self.visit_string(primitive) elif isinstance(primitive, UUIDType): return self.visit_uuid(primitive) elif isinstance(primitive, BinaryType): return self.visit_binary(primitive) - elif isinstance(primitive, UnknownType): - return self.visit_unknown(primitive) else: - raise ValueError(f"Type not recognized: {primitive}") + raise ValueError(f"Unknown type: {primitive}") @abstractmethod def visit_fixed(self, fixed_type: FixedType) -> T: @@ -795,18 +750,10 @@ def 
visit_time(self, time_type: TimeType) -> T: def visit_timestamp(self, timestamp_type: TimestampType) -> T: """Visit a TimestampType.""" - @abstractmethod - def visit_timestamp_ns(self, timestamp_type: TimestampNanoType) -> T: - """Visit a TimestampNanoType.""" - @abstractmethod def visit_timestamptz(self, timestamptz_type: TimestamptzType) -> T: """Visit a TimestamptzType.""" - @abstractmethod - def visit_timestamptz_ns(self, timestamptz_ns_type: TimestamptzNanoType) -> T: - """Visit a TimestamptzNanoType.""" - @abstractmethod def visit_string(self, string_type: StringType) -> T: """Visit a StringType.""" @@ -819,10 +766,6 @@ def visit_uuid(self, uuid_type: UUIDType) -> T: def visit_binary(self, binary_type: BinaryType) -> T: """Visit a BinaryType.""" - @abstractmethod - def visit_unknown(self, unknown_type: UnknownType) -> T: - """Visit a UnknownType.""" - @dataclass(init=True, eq=True, frozen=True) class Accessor: @@ -1243,7 +1186,6 @@ class _BuildPositionAccessors(SchemaVisitor[Dict[Position, Accessor]]): ... 1: Accessor(position=1, inner=None), ... 5: Accessor(position=2, inner=Accessor(position=0, inner=None)), ... 6: Accessor(position=2, inner=Accessor(position=1, inner=None)) - ... 3: Accessor(position=2, inner=None), ... } >>> result == expected True @@ -1259,7 +1201,8 @@ def struct(self, struct: StructType, field_results: List[Dict[Position, Accessor if field_results[position]: for inner_field_id, acc in field_results[position].items(): result[inner_field_id] = Accessor(position, inner=acc) - result[field.field_id] = Accessor(position) + else: + result[field.field_id] = Accessor(position) return result @@ -1359,29 +1302,14 @@ def primitive(self, primitive: PrimitiveType) -> PrimitiveType: # Implementation copied from Apache Iceberg repo. def make_compatible_name(name: str) -> str: - """Make a field name compatible with Avro specification. - - This function sanitizes field names to comply with Avro naming rules: - - Names must start with [A-Za-z_] - - Subsequent characters must be [A-Za-z0-9_] - - Invalid characters are replaced with _xHHHH where HHHH is the hex code. - Names starting with digits get a leading underscore. 
- - Args: - name: The original field name - - Returns: - A sanitized name that complies with Avro specification - """ if not _valid_avro_name(name): return _sanitize_name(name) return name def _valid_avro_name(name: str) -> bool: - if not len(name): - raise ValueError("Can not validate empty avro name") + length = len(name) + assert length > 0, ValueError("Can not validate empty avro name") first = name[0] if not (first.isalpha() or first == "_"): return False @@ -1409,9 +1337,7 @@ def _sanitize_name(name: str) -> str: def _sanitize_char(character: str) -> str: - if character.isdigit(): - return "_" + character - return "_x" + hex(ord(character))[2:].upper() + return "_" + character if character.isdigit() else "_x" + hex(ord(character))[2:].upper() def sanitize_column_names(schema: Schema) -> Schema: diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 92bbd60358..8ff299ce6a 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -17,7 +17,6 @@ from __future__ import annotations import itertools -import os import uuid import warnings from abc import ABC, abstractmethod @@ -63,6 +62,7 @@ manifest_evaluator, ) from pyiceberg.io import FileIO, load_file_io +from pyiceberg.io.pyarrow import ArrowScan, expression_to_pyarrow, schema_to_pyarrow from pyiceberg.manifest import ( POSITIONAL_DELETE_SCHEMA, DataFile, @@ -80,7 +80,6 @@ from pyiceberg.schema import Schema from pyiceberg.table.inspect import InspectTable from pyiceberg.table.locations import LocationProvider, load_location_provider -from pyiceberg.table.maintenance import MaintenanceTable from pyiceberg.table.metadata import ( INITIAL_SEQUENCE_NUMBER, TableMetadata, @@ -88,7 +87,7 @@ from pyiceberg.table.name_mapping import ( NameMapping, ) -from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRef +from pyiceberg.table.refs import SnapshotRef from pyiceberg.table.snapshots import ( Snapshot, SnapshotLogEntry, @@ -116,7 +115,11 @@ update_table_metadata, ) from pyiceberg.table.update.schema import UpdateSchema -from pyiceberg.table.update.snapshot import ManageSnapshots, UpdateSnapshot, _FastAppendFiles +from pyiceberg.table.update.snapshot import ( + ManageSnapshots, + UpdateSnapshot, + _FastAppendFiles, +) from pyiceberg.table.update.spec import UpdateSpec from pyiceberg.table.update.statistics import UpdateStatistics from pyiceberg.transforms import IdentityTransform @@ -138,14 +141,12 @@ from pyiceberg.utils.properties import property_as_bool if TYPE_CHECKING: - import bodo.pandas as bd import daft import pandas as pd import polars as pl import pyarrow as pa import ray from duckdb import DuckDBPyConnection - from pyiceberg_core.datafusion import IcebergDataFusionTable from pyiceberg.catalog import Catalog @@ -191,9 +192,6 @@ class TableProperties: WRITE_TARGET_FILE_SIZE_BYTES = "write.target-file-size-bytes" WRITE_TARGET_FILE_SIZE_BYTES_DEFAULT = 512 * 1024 * 1024 # 512 MB - WRITE_AVRO_COMPRESSION = "write.avro.compression-codec" - WRITE_AVRO_COMPRESSION_DEFAULT = "gzip" - DEFAULT_WRITE_METRICS_MODE = "write.metadata.metrics.default" DEFAULT_WRITE_METRICS_MODE_DEFAULT = "truncate(16)" @@ -205,7 +203,7 @@ class TableProperties: WRITE_PY_LOCATION_PROVIDER_IMPL = "write.py-location-provider.impl" OBJECT_STORE_ENABLED = "write.object-storage.enabled" - OBJECT_STORE_ENABLED_DEFAULT = False + OBJECT_STORE_ENABLED_DEFAULT = True WRITE_OBJECT_STORE_PARTITIONED_PATHS = "write.object-storage.partitioned-paths" WRITE_OBJECT_STORE_PARTITIONED_PATHS_DEFAULT = True @@ -220,7 +218,7 @@ class 
TableProperties: DEFAULT_NAME_MAPPING = "schema.name-mapping.default" FORMAT_VERSION = "format-version" - DEFAULT_FORMAT_VERSION: TableVersion = 2 + DEFAULT_FORMAT_VERSION = 2 MANIFEST_TARGET_SIZE_BYTES = "commit.manifest.target-size-bytes" MANIFEST_TARGET_SIZE_BYTES_DEFAULT = 8 * 1024 * 1024 # 8 MB @@ -246,6 +244,7 @@ class TableProperties: class Transaction: _table: Table + table_metadata: TableMetadata _autocommit: bool _updates: Tuple[TableUpdate, ...] _requirements: Tuple[TableRequirement, ...] @@ -257,15 +256,12 @@ def __init__(self, table: Table, autocommit: bool = False): table: The table that will be altered. autocommit: Option to automatically commit the changes when they are staged. """ + self.table_metadata = table.metadata self._table = table self._autocommit = autocommit self._updates = () self._requirements = () - @property - def table_metadata(self) -> TableMetadata: - return update_table_metadata(self._table.metadata, self._updates) - def __enter__(self) -> Transaction: """Start a transaction to update the table.""" return self @@ -291,8 +287,12 @@ def _apply(self, updates: Tuple[TableUpdate, ...], requirements: Tuple[TableRequ if type(new_requirement) not in existing_requirements: self._requirements = self._requirements + (new_requirement,) + self.table_metadata = update_table_metadata(self.table_metadata, updates) + if self._autocommit: self.commit_transaction() + self._updates = () + self._requirements = () return self @@ -398,7 +398,7 @@ def _build_partition_predicate(self, partition_records: Set[Record]) -> BooleanE expr = Or(expr, match_partition_expression) return expr - def _append_snapshot_producer(self, snapshot_properties: Dict[str, str], branch: Optional[str]) -> _FastAppendFiles: + def _append_snapshot_producer(self, snapshot_properties: Dict[str, str]) -> _FastAppendFiles: """Determine the append type based on table properties. Args: @@ -411,7 +411,7 @@ def _append_snapshot_producer(self, snapshot_properties: Dict[str, str], branch: TableProperties.MANIFEST_MERGE_ENABLED, TableProperties.MANIFEST_MERGE_ENABLED_DEFAULT, ) - update_snapshot = self.update_snapshot(snapshot_properties=snapshot_properties, branch=branch) + update_snapshot = self.update_snapshot(snapshot_properties=snapshot_properties) return update_snapshot.merge_append() if manifest_merge_enabled else update_snapshot.fast_append() def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive: bool = True) -> UpdateSchema: @@ -431,34 +431,21 @@ def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive name_mapping=self.table_metadata.name_mapping(), ) - def update_snapshot(self, snapshot_properties: Dict[str, str] = EMPTY_DICT, branch: Optional[str] = None) -> UpdateSnapshot: + def update_snapshot(self, snapshot_properties: Dict[str, str] = EMPTY_DICT) -> UpdateSnapshot: """Create a new UpdateSnapshot to produce a new snapshot for the table. Returns: A new UpdateSnapshot """ - if branch is None: - branch = MAIN_BRANCH - - return UpdateSnapshot(self, io=self._table.io, branch=branch, snapshot_properties=snapshot_properties) - - def update_statistics(self) -> UpdateStatistics: - """ - Create a new UpdateStatistics to update the statistics of the table. 
- - Returns: - A new UpdateStatistics - """ - return UpdateStatistics(transaction=self) + return UpdateSnapshot(self, io=self._table.io, snapshot_properties=snapshot_properties) - def append(self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT, branch: Optional[str] = None) -> None: + def append(self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT) -> None: """ Shorthand API for appending a PyArrow table to a table transaction. Args: df: The Arrow dataframe that will be appended to overwrite the table snapshot_properties: Custom properties to be added to the snapshot summary - branch: Branch Reference to run the append operation """ try: import pyarrow as pa @@ -478,13 +465,10 @@ def append(self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT, ) downcast_ns_timestamp_to_us = Config().get_bool(DOWNCAST_NS_TIMESTAMP_TO_US_ON_WRITE) or False _check_pyarrow_schema_compatible( - self.table_metadata.schema(), - provided_schema=df.schema, - downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, - format_version=self.table_metadata.format_version, + self.table_metadata.schema(), provided_schema=df.schema, downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us ) - with self._append_snapshot_producer(snapshot_properties, branch=branch) as append_files: + with self._append_snapshot_producer(snapshot_properties) as append_files: # skip writing data files if the dataframe is empty if df.shape[0] > 0: data_files = list( @@ -495,9 +479,7 @@ def append(self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT, for data_file in data_files: append_files.append_data_file(data_file) - def dynamic_partition_overwrite( - self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT, branch: Optional[str] = None - ) -> None: + def dynamic_partition_overwrite(self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT) -> None: """ Shorthand for overwriting existing partitions with a PyArrow table. 
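For context on the hunks above: once this revert is applied, `Transaction.append` and the related write helpers take only the dataframe, optional snapshot properties, and a case-sensitivity flag, with no `branch` argument. A minimal usage sketch under that assumption; the catalog name, table identifier, and data are placeholders, not part of the patch:

```python
import pyarrow as pa

from pyiceberg.catalog import load_catalog

# Placeholder catalog/table: assumes a configured catalog named "default"
# and an existing table whose schema matches the Arrow table below.
catalog = load_catalog("default")
table = catalog.load_table("db.events")

df = pa.table({"id": [1, 2, 3], "payload": ["a", "b", "c"]})

# With the revert applied, append accepts only the dataframe and snapshot properties.
with table.transaction() as tx:
    tx.append(df, snapshot_properties={"origin": "backfill"})
```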
@@ -508,7 +490,6 @@ def dynamic_partition_overwrite( Args: df: The Arrow dataframe that will be used to overwrite the table snapshot_properties: Custom properties to be added to the snapshot summary - branch: Branch Reference to run the dynamic partition overwrite operation """ try: import pyarrow as pa @@ -531,10 +512,7 @@ def dynamic_partition_overwrite( downcast_ns_timestamp_to_us = Config().get_bool(DOWNCAST_NS_TIMESTAMP_TO_US_ON_WRITE) or False _check_pyarrow_schema_compatible( - self.table_metadata.schema(), - provided_schema=df.schema, - downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, - format_version=self.table_metadata.format_version, + self.table_metadata.schema(), provided_schema=df.schema, downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us ) # If dataframe does not have data, there is no need to overwrite @@ -550,9 +528,9 @@ def dynamic_partition_overwrite( partitions_to_overwrite = {data_file.partition for data_file in data_files} delete_filter = self._build_partition_predicate(partition_records=partitions_to_overwrite) - self.delete(delete_filter=delete_filter, snapshot_properties=snapshot_properties, branch=branch) + self.delete(delete_filter=delete_filter, snapshot_properties=snapshot_properties) - with self._append_snapshot_producer(snapshot_properties, branch=branch) as append_files: + with self._append_snapshot_producer(snapshot_properties) as append_files: append_files.commit_uuid = append_snapshot_commit_uuid for data_file in data_files: append_files.append_data_file(data_file) @@ -563,7 +541,6 @@ def overwrite( overwrite_filter: Union[BooleanExpression, str] = ALWAYS_TRUE, snapshot_properties: Dict[str, str] = EMPTY_DICT, case_sensitive: bool = True, - branch: Optional[str] = None, ) -> None: """ Shorthand for adding a table overwrite with a PyArrow table to the transaction. 
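The restored `dynamic_partition_overwrite` above detects which partitions are present in the input table and replaces only those. A short sketch, assuming `table` is an already-loaded pyiceberg `Table` partitioned by an identity transform on a `region` column; the data is illustrative only:

```python
import pyarrow as pa

# Rows for the partitions to replace; partitions not present here are left untouched.
replacement = pa.table({"region": ["eu", "eu"], "value": [10, 20]})

# Old files in the "eu" partition are dropped and replaced by files written from `replacement`.
table.dynamic_partition_overwrite(replacement)
```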
@@ -578,9 +555,8 @@ def overwrite( df: The Arrow dataframe that will be used to overwrite the table overwrite_filter: ALWAYS_TRUE when you overwrite all the data, or a boolean expression in case of a partial overwrite - snapshot_properties: Custom properties to be added to the snapshot summary case_sensitive: A bool determine if the provided `overwrite_filter` is case-sensitive - branch: Branch Reference to run the overwrite operation + snapshot_properties: Custom properties to be added to the snapshot summary """ try: import pyarrow as pa @@ -600,22 +576,12 @@ def overwrite( ) downcast_ns_timestamp_to_us = Config().get_bool(DOWNCAST_NS_TIMESTAMP_TO_US_ON_WRITE) or False _check_pyarrow_schema_compatible( - self.table_metadata.schema(), - provided_schema=df.schema, - downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, - format_version=self.table_metadata.format_version, + self.table_metadata.schema(), provided_schema=df.schema, downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us ) - if overwrite_filter != AlwaysFalse(): - # Only delete when the filter is != AlwaysFalse - self.delete( - delete_filter=overwrite_filter, - case_sensitive=case_sensitive, - snapshot_properties=snapshot_properties, - branch=branch, - ) + self.delete(delete_filter=overwrite_filter, case_sensitive=case_sensitive, snapshot_properties=snapshot_properties) - with self._append_snapshot_producer(snapshot_properties, branch=branch) as append_files: + with self._append_snapshot_producer(snapshot_properties) as append_files: # skip writing data files if the dataframe is empty if df.shape[0] > 0: data_files = _dataframe_to_data_files( @@ -629,7 +595,6 @@ def delete( delete_filter: Union[str, BooleanExpression], snapshot_properties: Dict[str, str] = EMPTY_DICT, case_sensitive: bool = True, - branch: Optional[str] = None, ) -> None: """ Shorthand for deleting record from a table. 
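Similarly, the reverted `overwrite` and `delete` signatures drop the `branch` parameter; the overwrite path issues the delete for the filter and then appends the new data within the same transaction. A sketch against a hypothetical, already-loaded `table`:

```python
import pyarrow as pa

from pyiceberg.expressions import EqualTo

# `table` is assumed to be a loaded pyiceberg Table (see the earlier sketch).
df_new = pa.table({"region": ["eu"], "value": [99]})

# Replace only the rows matching the filter with the contents of df_new.
table.overwrite(df_new, overwrite_filter=EqualTo("region", "eu"))

# delete also accepts a row-filter string, which is parsed into a boolean expression.
table.delete(delete_filter="region = 'obsolete'")
```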
@@ -643,7 +608,6 @@ def delete( delete_filter: A boolean expression to delete rows from a table snapshot_properties: Custom properties to be added to the snapshot summary case_sensitive: A bool determine if the provided `delete_filter` is case-sensitive - branch: Branch Reference to run the delete operation """ from pyiceberg.io.pyarrow import ( ArrowScan, @@ -660,7 +624,7 @@ def delete( if isinstance(delete_filter, str): delete_filter = _parse_row_filter(delete_filter) - with self.update_snapshot(snapshot_properties=snapshot_properties, branch=branch).delete() as delete_snapshot: + with self.update_snapshot(snapshot_properties=snapshot_properties).delete() as delete_snapshot: delete_snapshot.delete_by_predicate(delete_filter, case_sensitive) # Check if there are any files that require an actual rewrite of a data file @@ -668,10 +632,7 @@ def delete( bound_delete_filter = bind(self.table_metadata.schema(), delete_filter, case_sensitive) preserve_row_filter = _expression_to_complementary_pyarrow(bound_delete_filter) - file_scan = self._scan(row_filter=delete_filter, case_sensitive=case_sensitive) - if branch is not None: - file_scan = file_scan.use_ref(branch) - files = file_scan.plan_files() + files = self._scan(row_filter=delete_filter, case_sensitive=case_sensitive).plan_files() commit_uuid = uuid.uuid4() counter = itertools.count(0) @@ -713,9 +674,7 @@ def delete( ) if len(replaced_files) > 0: - with self.update_snapshot( - snapshot_properties=snapshot_properties, branch=branch - ).overwrite() as overwrite_snapshot: + with self.update_snapshot(snapshot_properties=snapshot_properties).overwrite() as overwrite_snapshot: overwrite_snapshot.commit_uuid = commit_uuid for original_data_file, replaced_data_files in replaced_files: overwrite_snapshot.delete_data_file(original_data_file) @@ -725,150 +684,6 @@ def delete( if not delete_snapshot.files_affected and not delete_snapshot.rewrites_needed: warnings.warn("Delete operation did not match any records") - def upsert( - self, - df: pa.Table, - join_cols: Optional[List[str]] = None, - when_matched_update_all: bool = True, - when_not_matched_insert_all: bool = True, - case_sensitive: bool = True, - branch: Optional[str] = None, - ) -> UpsertResult: - """Shorthand API for performing an upsert to an iceberg table. - - Args: - - df: The input dataframe to upsert with the table's data. - join_cols: Columns to join on, if not provided, it will use the identifier-field-ids. 
- when_matched_update_all: Bool indicating to update rows that are matched but require an update due to a value in a non-key column changing - when_not_matched_insert_all: Bool indicating new rows to be inserted that do not match any existing rows in the table - case_sensitive: Bool indicating if the match should be case-sensitive - branch: Branch Reference to run the upsert operation - - To learn more about the identifier-field-ids: https://iceberg.apache.org/spec/#identifier-field-ids - - Example Use Cases: - Case 1: Both Parameters = True (Full Upsert) - Existing row found → Update it - New row found → Insert it - - Case 2: when_matched_update_all = False, when_not_matched_insert_all = True - Existing row found → Do nothing (no updates) - New row found → Insert it - - Case 3: when_matched_update_all = True, when_not_matched_insert_all = False - Existing row found → Update it - New row found → Do nothing (no inserts) - - Case 4: Both Parameters = False (No Merge Effect) - Existing row found → Do nothing - New row found → Do nothing - (Function effectively does nothing) - - - Returns: - An UpsertResult class (contains details of rows updated and inserted) - """ - try: - import pyarrow as pa # noqa: F401 - except ModuleNotFoundError as e: - raise ModuleNotFoundError("For writes PyArrow needs to be installed") from e - - from pyiceberg.io.pyarrow import expression_to_pyarrow - from pyiceberg.table import upsert_util - - if join_cols is None: - join_cols = [] - for field_id in self.table_metadata.schema().identifier_field_ids: - col = self.table_metadata.schema().find_column_name(field_id) - if col is not None: - join_cols.append(col) - else: - raise ValueError(f"Field-ID could not be found: {join_cols}") - - if len(join_cols) == 0: - raise ValueError("Join columns could not be found, please set identifier-field-ids or pass in explicitly.") - - if not when_matched_update_all and not when_not_matched_insert_all: - raise ValueError("no upsert options selected...exiting") - - if upsert_util.has_duplicate_rows(df, join_cols): - raise ValueError("Duplicate rows found in source dataset based on the key columns. No upsert executed") - - from pyiceberg.io.pyarrow import _check_pyarrow_schema_compatible - - downcast_ns_timestamp_to_us = Config().get_bool(DOWNCAST_NS_TIMESTAMP_TO_US_ON_WRITE) or False - _check_pyarrow_schema_compatible( - self.table_metadata.schema(), - provided_schema=df.schema, - downcast_ns_timestamp_to_us=downcast_ns_timestamp_to_us, - format_version=self.table_metadata.format_version, - ) - - # get list of rows that exist so we don't have to load the entire target table - matched_predicate = upsert_util.create_match_filter(df, join_cols) - - # We must use Transaction.table_metadata for the scan. This includes all uncommitted - but relevant - changes. 
- - matched_iceberg_record_batches_scan = DataScan( - table_metadata=self.table_metadata, - io=self._table.io, - row_filter=matched_predicate, - case_sensitive=case_sensitive, - ) - - if branch is not None: - matched_iceberg_record_batches_scan = matched_iceberg_record_batches_scan.use_ref(branch) - - matched_iceberg_record_batches = matched_iceberg_record_batches_scan.to_arrow_batch_reader() - - batches_to_overwrite = [] - overwrite_predicates = [] - rows_to_insert = df - - for batch in matched_iceberg_record_batches: - rows = pa.Table.from_batches([batch]) - - if when_matched_update_all: - # function get_rows_to_update is doing a check on non-key columns to see if any of the values have actually changed - # we don't want to do just a blanket overwrite for matched rows if the actual non-key column data hasn't changed - # this extra step avoids unnecessary IO and writes - rows_to_update = upsert_util.get_rows_to_update(df, rows, join_cols) - - if len(rows_to_update) > 0: - # build the match predicate filter - overwrite_mask_predicate = upsert_util.create_match_filter(rows_to_update, join_cols) - - batches_to_overwrite.append(rows_to_update) - overwrite_predicates.append(overwrite_mask_predicate) - - if when_not_matched_insert_all: - expr_match = upsert_util.create_match_filter(rows, join_cols) - expr_match_bound = bind(self.table_metadata.schema(), expr_match, case_sensitive=case_sensitive) - expr_match_arrow = expression_to_pyarrow(expr_match_bound) - - # Filter rows per batch. - rows_to_insert = rows_to_insert.filter(~expr_match_arrow) - - update_row_cnt = 0 - insert_row_cnt = 0 - - if batches_to_overwrite: - rows_to_update = pa.concat_tables(batches_to_overwrite) - update_row_cnt = len(rows_to_update) - self.overwrite( - rows_to_update, - overwrite_filter=Or(*overwrite_predicates) if len(overwrite_predicates) > 1 else overwrite_predicates[0], - branch=branch, - ) - - if when_not_matched_insert_all: - insert_row_cnt = len(rows_to_insert) - if rows_to_insert: - self.append(rows_to_insert, branch=branch) - - return UpsertResult(rows_updated=update_row_cnt, rows_inserted=insert_row_cnt) - def add_files( self, file_paths: List[str], snapshot_properties: Dict[str, str] = EMPTY_DICT, check_duplicate_files: bool = True ) -> None: @@ -890,7 +705,7 @@ def add_files( import pyarrow.compute as pc expr = pc.field("file_path").isin(file_paths) - referenced_files = [file["file_path"] for file in self._table.inspect.data_files().filter(expr).to_pylist()] + referenced_files = [file["file_path"] for file in self._table.inspect.files().filter(expr).to_pylist()] if referenced_files: raise ValueError(f"Cannot add files that are already referenced by table, files: {', '.join(referenced_files)}") @@ -948,15 +763,13 @@ def commit_transaction(self) -> Table: updates=self._updates, requirements=self._requirements, ) - - self._updates = () - self._requirements = () - - return self._table + return self._table + else: + return self._table class CreateTableTransaction(Transaction): - """A transaction that involves the creation of a new table.""" + """A transaction that involves the creation of a a new table.""" def _initial_changes(self, table_metadata: TableMetadata) -> None: """Set the initial changes that can reconstruct the initial table metadata when creating the CreateTableTransaction.""" @@ -1001,15 +814,11 @@ def commit_transaction(self) -> Table: Returns: The table with the updates applied. 
""" - if len(self._updates) > 0: - self._table._do_commit( # pylint: disable=W0212 - updates=self._updates, - requirements=(AssertCreate(),), - ) - - self._updates = () - self._requirements = () - + self._requirements = (AssertCreate(),) + self._table._do_commit( # pylint: disable=W0212 + updates=self._updates, + requirements=self._requirements, + ) return self._table @@ -1087,15 +896,6 @@ def inspect(self) -> InspectTable: """ return InspectTable(self) - @property - def maintenance(self) -> MaintenanceTable: - """Return the MaintenanceTable object for maintenance. - - Returns: - MaintenanceTable object based on this Table. - """ - return MaintenanceTable(self) - def refresh(self) -> Table: """Refresh the current table metadata. @@ -1313,7 +1113,6 @@ def upsert( when_matched_update_all: bool = True, when_not_matched_insert_all: bool = True, case_sensitive: bool = True, - branch: Optional[str] = None, ) -> UpsertResult: """Shorthand API for performing an upsert to an iceberg table. @@ -1324,7 +1123,6 @@ def upsert( when_matched_update_all: Bool indicating to update rows that are matched but require an update due to a value in a non-key column changing when_not_matched_insert_all: Bool indicating new rows to be inserted that do not match any existing rows in the table case_sensitive: Bool indicating if the match should be case-sensitive - branch: Branch Reference to run the upsert operation To learn more about the identifier-field-ids: https://iceberg.apache.org/spec/#identifier-field-ids @@ -1350,41 +1148,77 @@ def upsert( Returns: An UpsertResult class (contains details of rows updated and inserted) """ + from pyiceberg.table import upsert_util + + if join_cols is None: + join_cols = [] + for field_id in self.schema().identifier_field_ids: + col = self.schema().find_column_name(field_id) + if col is not None: + join_cols.append(col) + else: + raise ValueError(f"Field-ID could not be found: {join_cols}") + + if not when_matched_update_all and not when_not_matched_insert_all: + raise ValueError("no upsert options selected...exiting") + + if upsert_util.has_duplicate_rows(df, join_cols): + raise ValueError("Duplicate rows found in source dataset based on the key columns. 
No upsert executed") + + # get list of rows that exist so we don't have to load the entire target table + matched_predicate = upsert_util.create_match_filter(df, join_cols) + matched_iceberg_table = self.scan(row_filter=matched_predicate, case_sensitive=case_sensitive).to_arrow() + + update_row_cnt = 0 + insert_row_cnt = 0 + with self.transaction() as tx: - return tx.upsert( - df=df, - join_cols=join_cols, - when_matched_update_all=when_matched_update_all, - when_not_matched_insert_all=when_not_matched_insert_all, - case_sensitive=case_sensitive, - branch=branch, - ) + if when_matched_update_all: + # function get_rows_to_update is doing a check on non-key columns to see if any of the values have actually changed + # we don't want to do just a blanket overwrite for matched rows if the actual non-key column data hasn't changed + # this extra step avoids unnecessary IO and writes + rows_to_update = upsert_util.get_rows_to_update(df, matched_iceberg_table, join_cols) + + update_row_cnt = len(rows_to_update) + + # build the match predicate filter + overwrite_mask_predicate = upsert_util.create_match_filter(rows_to_update, join_cols) + + tx.overwrite(rows_to_update, overwrite_filter=overwrite_mask_predicate) + + if when_not_matched_insert_all: + expr_match = upsert_util.create_match_filter(matched_iceberg_table, join_cols) + expr_match_bound = bind(self.schema(), expr_match, case_sensitive=case_sensitive) + expr_match_arrow = expression_to_pyarrow(expr_match_bound) + rows_to_insert = df.filter(~expr_match_arrow) - def append(self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT, branch: Optional[str] = None) -> None: + insert_row_cnt = len(rows_to_insert) + + tx.append(rows_to_insert) + + return UpsertResult(rows_updated=update_row_cnt, rows_inserted=insert_row_cnt) + + def append(self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT) -> None: """ Shorthand API for appending a PyArrow table to the table. Args: df: The Arrow dataframe that will be appended to overwrite the table snapshot_properties: Custom properties to be added to the snapshot summary - branch: Branch Reference to run the append operation """ with self.transaction() as tx: - tx.append(df=df, snapshot_properties=snapshot_properties, branch=branch) + tx.append(df=df, snapshot_properties=snapshot_properties) - def dynamic_partition_overwrite( - self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT, branch: Optional[str] = None - ) -> None: + def dynamic_partition_overwrite(self, df: pa.Table, snapshot_properties: Dict[str, str] = EMPTY_DICT) -> None: """Shorthand for dynamic overwriting the table with a PyArrow table. Old partitions are auto detected and replaced with data files created for input arrow table. Args: df: The Arrow dataframe that will be used to overwrite the table snapshot_properties: Custom properties to be added to the snapshot summary - branch: Branch Reference to run the dynamic partition overwrite operation """ with self.transaction() as tx: - tx.dynamic_partition_overwrite(df=df, snapshot_properties=snapshot_properties, branch=branch) + tx.dynamic_partition_overwrite(df=df, snapshot_properties=snapshot_properties) def overwrite( self, @@ -1392,7 +1226,6 @@ def overwrite( overwrite_filter: Union[BooleanExpression, str] = ALWAYS_TRUE, snapshot_properties: Dict[str, str] = EMPTY_DICT, case_sensitive: bool = True, - branch: Optional[str] = None, ) -> None: """ Shorthand for overwriting the table with a PyArrow table. 
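The restored `Table.upsert` above derives the join columns from the table's identifier fields when `join_cols` is omitted, loads the matched rows eagerly via a scan, and then performs the overwrite and append inside a single transaction. A usage sketch with a hypothetical `table` keyed on `id`:

```python
import pyarrow as pa

# Source rows: id=1 updates an existing row, id=4 is new.
source = pa.table({"id": [1, 4], "name": ["updated", "new"]})

# join_cols can be omitted if the table declares identifier-field-ids;
# it is passed explicitly here for clarity.
result = table.upsert(source, join_cols=["id"])
print(result.rows_updated, result.rows_inserted)
```

Because the matched rows are materialized with `to_arrow()` in this version, very large overlap between the source and target can be memory-intensive.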
@@ -1409,15 +1242,10 @@ def overwrite( or a boolean expression in case of a partial overwrite snapshot_properties: Custom properties to be added to the snapshot summary case_sensitive: A bool determine if the provided `overwrite_filter` is case-sensitive - branch: Branch Reference to run the overwrite operation """ with self.transaction() as tx: tx.overwrite( - df=df, - overwrite_filter=overwrite_filter, - case_sensitive=case_sensitive, - snapshot_properties=snapshot_properties, - branch=branch, + df=df, overwrite_filter=overwrite_filter, case_sensitive=case_sensitive, snapshot_properties=snapshot_properties ) def delete( @@ -1425,7 +1253,6 @@ def delete( delete_filter: Union[BooleanExpression, str] = ALWAYS_TRUE, snapshot_properties: Dict[str, str] = EMPTY_DICT, case_sensitive: bool = True, - branch: Optional[str] = None, ) -> None: """ Shorthand for deleting rows from the table. @@ -1434,12 +1261,9 @@ def delete( delete_filter: The predicate that used to remove rows snapshot_properties: Custom properties to be added to the snapshot summary case_sensitive: A bool determine if the provided `delete_filter` is case-sensitive - branch: Branch Reference to run the delete operation """ with self.transaction() as tx: - tx.delete( - delete_filter=delete_filter, case_sensitive=case_sensitive, snapshot_properties=snapshot_properties, branch=branch - ) + tx.delete(delete_filter=delete_filter, case_sensitive=case_sensitive, snapshot_properties=snapshot_properties) def add_files( self, file_paths: List[str], snapshot_properties: Dict[str, str] = EMPTY_DICT, check_duplicate_files: bool = True @@ -1508,16 +1332,6 @@ def to_daft(self) -> daft.DataFrame: return daft.read_iceberg(self) - def to_bodo(self) -> bd.DataFrame: - """Read a bodo DataFrame lazily from this Iceberg table. - - Returns: - bd.DataFrame: Unmaterialized Bodo Dataframe created from the Iceberg table - """ - import bodo.pandas as bd - - return bd.read_iceberg_table(self) - def to_polars(self) -> pl.LazyFrame: """Lazily read from this Apache Iceberg table. @@ -1528,51 +1342,6 @@ def to_polars(self) -> pl.LazyFrame: return pl.scan_iceberg(self) - def __datafusion_table_provider__(self) -> "IcebergDataFusionTable": - """Return the DataFusion table provider PyCapsule interface. - - To support DataFusion features such as push down filtering, this function will return a PyCapsule - interface that conforms to the FFI Table Provider required by DataFusion. From an end user perspective - you should not need to call this function directly. Instead you can use ``register_table_provider`` in - the DataFusion SessionContext. - - Returns: - A PyCapsule DataFusion TableProvider interface. 
- - Example: - ```python - from datafusion import SessionContext - from pyiceberg.catalog import load_catalog - import pyarrow as pa - catalog = load_catalog("catalog", type="in-memory") - catalog.create_namespace_if_not_exists("default") - data = pa.table({"x": [1, 2, 3], "y": [4, 5, 6]}) - iceberg_table = catalog.create_table("default.test", schema=data.schema) - iceberg_table.append(data) - ctx = SessionContext() - ctx.register_table_provider("test", iceberg_table) - ctx.table("test").show() - ``` - Results in - ``` - DataFrame() - +---+---+ - | x | y | - +---+---+ - | 1 | 4 | - | 2 | 5 | - | 3 | 6 | - +---+---+ - ``` - """ - from pyiceberg_core.datafusion import IcebergDataFusionTable - - return IcebergDataFusionTable( - identifier=self.name(), - metadata_location=self.metadata_location, - file_io_properties=self.io.properties, - ).__datafusion_table_provider__() - class StaticTable(Table): """Load a table directly from a metadata file (i.e., without using a catalog).""" @@ -1581,27 +1350,8 @@ def refresh(self) -> Table: """Refresh the current table metadata.""" raise NotImplementedError("To be implemented") - @classmethod - def _metadata_location_from_version_hint(cls, metadata_location: str, properties: Properties = EMPTY_DICT) -> str: - version_hint_location = os.path.join(metadata_location, "metadata", "version-hint.text") - io = load_file_io(properties=properties, location=version_hint_location) - file = io.new_input(version_hint_location) - - with file.open() as stream: - content = stream.read().decode("utf-8") - - if content.endswith(".metadata.json"): - return os.path.join(metadata_location, "metadata", content) - elif content.isnumeric(): - return os.path.join(metadata_location, "metadata", "v%s.metadata.json").format(content) - else: - return os.path.join(metadata_location, "metadata", "%s.metadata.json").format(content) - @classmethod def from_metadata(cls, metadata_location: str, properties: Properties = EMPTY_DICT) -> StaticTable: - if not metadata_location.endswith(".metadata.json"): - metadata_location = StaticTable._metadata_location_from_version_hint(metadata_location, properties) - io = load_file_io(properties=properties, location=metadata_location) file = io.new_input(metadata_location) @@ -1724,14 +1474,7 @@ def to_polars(self) -> pl.DataFrame: ... def update(self: S, **overrides: Any) -> S: """Create a copy of this table scan with updated fields.""" - from inspect import signature - - # Extract those attributes that are constructor parameters. We don't use self.__dict__ as the kwargs to the - # constructors because it may contain additional attributes that are not part of the constructor signature. - params = signature(type(self).__init__).parameters.keys() - {"self"} # Skip "self" parameter - kwargs = {param: getattr(self, param) for param in params} # Assume parameters are attributes - - return type(self)(**{**kwargs, **overrides}) + return type(self)(**{**self.__dict__, **overrides}) def use_ref(self: S, name: str) -> S: if self.snapshot_id: @@ -1789,6 +1532,7 @@ def _open_manifest( io: FileIO, manifest: ManifestFile, partition_filter: Callable[[DataFile], bool], + residual_evaluator: Callable[[Record], BooleanExpression], metrics_evaluator: Callable[[DataFile], bool], ) -> List[ManifestEntry]: """Open a manifest file and return matching manifest entries. 
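With the version-hint helper removed above, `StaticTable.from_metadata` again expects the location to point directly at a metadata JSON file rather than a table root directory. A minimal sketch; the path is a placeholder:

```python
from pyiceberg.table import StaticTable

# The location must be the metadata file itself, not the table directory.
static_table = StaticTable.from_metadata(
    "s3://warehouse/db/events/metadata/00001-1a2b3c4d.metadata.json"
)
print(static_table.schema())
```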
@@ -1881,11 +1625,13 @@ def _build_metrics_evaluator(self) -> Callable[[DataFile], bool]: def _build_residual_evaluator(self, spec_id: int) -> Callable[[DataFile], ResidualEvaluator]: spec = self.table_metadata.specs()[spec_id] - from pyiceberg.expressions.visitors import residual_evaluator_of - # The lambda created here is run in multiple threads. # So we avoid creating _EvaluatorExpression methods bound to a single # shared instance across multiple threads. + # return lambda data_file: (partition_schema, partition_expr, self.case_sensitive)(data_file.partition) + from pyiceberg.expressions.visitors import residual_evaluator_of + + # assert self.row_filter == False return lambda datafile: ( residual_evaluator_of( spec=spec, @@ -1895,8 +1641,7 @@ def _build_residual_evaluator(self, spec_id: int) -> Callable[[DataFile], Residu ) ) - @staticmethod - def _check_sequence_number(min_sequence_number: int, manifest: ManifestFile) -> bool: + def _check_sequence_number(self, min_sequence_number: int, manifest: ManifestFile) -> bool: """Ensure that no manifests are loaded that contain deletes that are older than the data. Args: @@ -1954,6 +1699,7 @@ def plan_files(self) -> Iterable[FileScanTask]: self.io, manifest, partition_evaluators[manifest.partition_spec_id], + residual_evaluators[manifest.partition_spec_id], self._build_metrics_evaluator(), ) for manifest in manifests @@ -2012,7 +1758,7 @@ def to_arrow_batch_reader(self) -> pa.RecordBatchReader: """ import pyarrow as pa - from pyiceberg.io.pyarrow import ArrowScan, schema_to_pyarrow + from pyiceberg.io.pyarrow import ArrowScan target_schema = schema_to_pyarrow(self.projection()) batches = ArrowScan( @@ -2022,7 +1768,7 @@ def to_arrow_batch_reader(self) -> pa.RecordBatchReader: return pa.RecordBatchReader.from_batches( target_schema, batches, - ).cast(target_schema) + ) def to_pandas(self, **kwargs: Any) -> pd.DataFrame: """Read a Pandas DataFrame eagerly from this Iceberg table. @@ -2070,8 +1816,6 @@ def to_polars(self) -> pl.DataFrame: return result def count(self) -> int: - from pyiceberg.io.pyarrow import ArrowScan - # Usage: Calculates the total number of records in a Scan that haven't had positional deletes. res = 0 # every task is a FileScanTask @@ -2114,15 +1858,20 @@ def generate_data_file_filename(self, extension: str) -> str: return f"00000-{self.task_id}-{self.write_uuid}.{extension}" +@dataclass(frozen=True) +class AddFileTask: + """Task with the parameters for adding a Parquet file as a DataFile.""" + + file_path: str + partition_field_value: Record + + def _parquet_files_to_data_files(table_metadata: TableMetadata, file_paths: List[str], io: FileIO) -> Iterable[DataFile]: """Convert a list files into DataFiles. Returns: An iterable that supplies DataFiles that describe the parquet files. 
""" - from pyiceberg.io.pyarrow import parquet_file_to_data_file - - executor = ExecutorFactory.get_or_create() - futures = [executor.submit(parquet_file_to_data_file, io, table_metadata, file_path) for file_path in file_paths] + from pyiceberg.io.pyarrow import parquet_files_to_data_files - return [f.result() for f in futures if f.result()] + yield from parquet_files_to_data_files(io=io, table_metadata=table_metadata, file_paths=iter(file_paths)) diff --git a/pyiceberg/table/inspect.py b/pyiceberg/table/inspect.py index 3bb0268a05..91bdb2f29d 100644 --- a/pyiceberg/table/inspect.py +++ b/pyiceberg/table/inspect.py @@ -20,7 +20,7 @@ from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Set, Tuple from pyiceberg.conversions import from_bytes -from pyiceberg.manifest import DataFile, DataFileContent, ManifestContent, ManifestFile, PartitionFieldSummary +from pyiceberg.manifest import DataFile, DataFileContent, ManifestContent, PartitionFieldSummary from pyiceberg.partitioning import PartitionSpec from pyiceberg.table.snapshots import Snapshot, ancestors_of from pyiceberg.types import PrimitiveType @@ -161,7 +161,7 @@ def _readable_metrics_struct(bound_type: PrimitiveType) -> pa.StructType: entries = [] snapshot = self._get_snapshot(snapshot_id) for manifest in snapshot.manifests(self.tbl.io): - for entry in manifest.fetch_manifest_entry(io=self.tbl.io, discard_deleted=False): + for entry in manifest.fetch_manifest_entry(io=self.tbl.io): column_sizes = entry.data_file.column_sizes or {} value_counts = entry.data_file.value_counts or {} null_value_counts = entry.data_file.null_value_counts or {} @@ -205,9 +205,9 @@ def _readable_metrics_struct(bound_type: PrimitiveType) -> pa.StructType: "record_count": entry.data_file.record_count, "file_size_in_bytes": entry.data_file.file_size_in_bytes, "column_sizes": dict(entry.data_file.column_sizes), - "value_counts": dict(entry.data_file.value_counts or {}), - "null_value_counts": dict(entry.data_file.null_value_counts or {}), - "nan_value_counts": dict(entry.data_file.nan_value_counts or {}), + "value_counts": dict(entry.data_file.value_counts), + "null_value_counts": dict(entry.data_file.null_value_counts), + "nan_value_counts": dict(entry.data_file.nan_value_counts), "lower_bounds": entry.data_file.lower_bounds, "upper_bounds": entry.data_file.upper_bounds, "key_metadata": entry.data_file.key_metadata, @@ -523,73 +523,7 @@ def history(self) -> "pa.Table": return pa.Table.from_pylist(history, schema=history_schema) - def _get_files_from_manifest( - self, manifest_list: ManifestFile, data_file_filter: Optional[Set[DataFileContent]] = None - ) -> "pa.Table": - import pyarrow as pa - - files: list[dict[str, Any]] = [] - schema = self.tbl.metadata.schema() - io = self.tbl.io - - for manifest_entry in manifest_list.fetch_manifest_entry(io): - data_file = manifest_entry.data_file - if data_file_filter and data_file.content not in data_file_filter: - continue - column_sizes = data_file.column_sizes or {} - value_counts = data_file.value_counts or {} - null_value_counts = data_file.null_value_counts or {} - nan_value_counts = data_file.nan_value_counts or {} - lower_bounds = data_file.lower_bounds or {} - upper_bounds = data_file.upper_bounds or {} - readable_metrics = { - schema.find_column_name(field.field_id): { - "column_size": column_sizes.get(field.field_id), - "value_count": value_counts.get(field.field_id), - "null_value_count": null_value_counts.get(field.field_id), - "nan_value_count": nan_value_counts.get(field.field_id), - 
"lower_bound": from_bytes(field.field_type, lower_bound) - if (lower_bound := lower_bounds.get(field.field_id)) - else None, - "upper_bound": from_bytes(field.field_type, upper_bound) - if (upper_bound := upper_bounds.get(field.field_id)) - else None, - } - for field in self.tbl.metadata.schema().fields - } - partition = data_file.partition - partition_record_dict = { - field.name: partition[pos] - for pos, field in enumerate(self.tbl.metadata.specs()[manifest_list.partition_spec_id].fields) - } - files.append( - { - "content": data_file.content, - "file_path": data_file.file_path, - "file_format": data_file.file_format, - "spec_id": data_file.spec_id, - "partition": partition_record_dict, - "record_count": data_file.record_count, - "file_size_in_bytes": data_file.file_size_in_bytes, - "column_sizes": dict(data_file.column_sizes) if data_file.column_sizes is not None else None, - "value_counts": dict(data_file.value_counts) if data_file.value_counts is not None else None, - "null_value_counts": dict(data_file.null_value_counts) if data_file.null_value_counts is not None else None, - "nan_value_counts": dict(data_file.nan_value_counts) if data_file.nan_value_counts is not None else None, - "lower_bounds": dict(data_file.lower_bounds) if data_file.lower_bounds is not None else None, - "upper_bounds": dict(data_file.upper_bounds) if data_file.upper_bounds is not None else None, - "key_metadata": data_file.key_metadata, - "split_offsets": data_file.split_offsets, - "equality_ids": data_file.equality_ids, - "sort_order_id": data_file.sort_order_id, - "readable_metrics": readable_metrics, - } - ) - return pa.Table.from_pylist( - files, - schema=self._get_files_schema(), - ) - - def _get_files_schema(self) -> "pa.Schema": + def _files(self, snapshot_id: Optional[int] = None, data_file_filter: Optional[Set[DataFileContent]] = None) -> "pa.Table": import pyarrow as pa from pyiceberg.io.pyarrow import schema_to_pyarrow @@ -610,9 +544,6 @@ def _readable_metrics_struct(bound_type: PrimitiveType) -> pa.StructType: ] ) - partition_record = self.tbl.metadata.specs_struct() - pa_record_struct = schema_to_pyarrow(partition_record) - for field in self.tbl.metadata.schema().fields: readable_metrics_struct.append( pa.field(schema.find_column_name(field.field_id), _readable_metrics_struct(field.field_type), nullable=False) @@ -624,7 +555,6 @@ def _readable_metrics_struct(bound_type: PrimitiveType) -> pa.StructType: pa.field("file_path", pa.string(), nullable=False), pa.field("file_format", pa.dictionary(pa.int32(), pa.string()), nullable=False), pa.field("spec_id", pa.int32(), nullable=False), - pa.field("partition", pa_record_struct, nullable=False), pa.field("record_count", pa.int64(), nullable=False), pa.field("file_size_in_bytes", pa.int64(), nullable=False), pa.field("column_sizes", pa.map_(pa.int32(), pa.int64()), nullable=True), @@ -640,24 +570,71 @@ def _readable_metrics_struct(bound_type: PrimitiveType) -> pa.StructType: pa.field("readable_metrics", pa.struct(readable_metrics_struct), nullable=True), ] ) - return files_schema - def _files(self, snapshot_id: Optional[int] = None, data_file_filter: Optional[Set[DataFileContent]] = None) -> "pa.Table": - import pyarrow as pa + files: list[dict[str, Any]] = [] if not snapshot_id and not self.tbl.metadata.current_snapshot(): - return self._get_files_schema().empty_table() - + return pa.Table.from_pylist( + files, + schema=files_schema, + ) snapshot = self._get_snapshot(snapshot_id) + io = self.tbl.io + for manifest_list in snapshot.manifests(io): + for 
manifest_entry in manifest_list.fetch_manifest_entry(io): + data_file = manifest_entry.data_file + if data_file_filter and data_file.content not in data_file_filter: + continue + column_sizes = data_file.column_sizes or {} + value_counts = data_file.value_counts or {} + null_value_counts = data_file.null_value_counts or {} + nan_value_counts = data_file.nan_value_counts or {} + lower_bounds = data_file.lower_bounds or {} + upper_bounds = data_file.upper_bounds or {} + readable_metrics = { + schema.find_column_name(field.field_id): { + "column_size": column_sizes.get(field.field_id), + "value_count": value_counts.get(field.field_id), + "null_value_count": null_value_counts.get(field.field_id), + "nan_value_count": nan_value_counts.get(field.field_id), + "lower_bound": from_bytes(field.field_type, lower_bound) + if (lower_bound := lower_bounds.get(field.field_id)) + else None, + "upper_bound": from_bytes(field.field_type, upper_bound) + if (upper_bound := upper_bounds.get(field.field_id)) + else None, + } + for field in self.tbl.metadata.schema().fields + } + files.append( + { + "content": data_file.content, + "file_path": data_file.file_path, + "file_format": data_file.file_format, + "spec_id": data_file.spec_id, + "record_count": data_file.record_count, + "file_size_in_bytes": data_file.file_size_in_bytes, + "column_sizes": dict(data_file.column_sizes) if data_file.column_sizes is not None else None, + "value_counts": dict(data_file.value_counts) if data_file.value_counts is not None else None, + "null_value_counts": dict(data_file.null_value_counts) + if data_file.null_value_counts is not None + else None, + "nan_value_counts": dict(data_file.nan_value_counts) if data_file.nan_value_counts is not None else None, + "lower_bounds": dict(data_file.lower_bounds) if data_file.lower_bounds is not None else None, + "upper_bounds": dict(data_file.upper_bounds) if data_file.upper_bounds is not None else None, + "key_metadata": data_file.key_metadata, + "split_offsets": data_file.split_offsets, + "equality_ids": data_file.equality_ids, + "sort_order_id": data_file.sort_order_id, + "readable_metrics": readable_metrics, + } + ) - executor = ExecutorFactory.get_or_create() - results = list( - executor.map( - lambda manifest_list: self._get_files_from_manifest(manifest_list, data_file_filter), snapshot.manifests(io) - ) + return pa.Table.from_pylist( + files, + schema=files_schema, ) - return pa.concat_tables(results) def files(self, snapshot_id: Optional[int] = None) -> "pa.Table": return self._files(snapshot_id) @@ -680,30 +657,3 @@ def all_manifests(self) -> "pa.Table": lambda args: self._generate_manifests_table(*args), [(snapshot, True) for snapshot in snapshots] ) return pa.concat_tables(manifests_by_snapshots) - - def _all_files(self, data_file_filter: Optional[Set[DataFileContent]] = None) -> "pa.Table": - import pyarrow as pa - - snapshots = self.tbl.snapshots() - if not snapshots: - return pa.Table.from_pylist([], schema=self._get_files_schema()) - - executor = ExecutorFactory.get_or_create() - manifest_lists = executor.map(lambda snapshot: snapshot.manifests(self.tbl.io), snapshots) - - unique_manifests = {(manifest.manifest_path, manifest) for manifest_list in manifest_lists for manifest in manifest_list} - - file_lists = executor.map( - lambda args: self._get_files_from_manifest(*args), [(manifest, data_file_filter) for _, manifest in unique_manifests] - ) - - return pa.concat_tables(file_lists) - - def all_files(self) -> "pa.Table": - return self._all_files() - - def all_data_files(self) 
-> "pa.Table": - return self._all_files({DataFileContent.DATA}) - - def all_delete_files(self) -> "pa.Table": - return self._all_files({DataFileContent.POSITION_DELETES, DataFileContent.EQUALITY_DELETES}) diff --git a/pyiceberg/table/maintenance.py b/pyiceberg/table/maintenance.py deleted file mode 100644 index 0fcda35ae9..0000000000 --- a/pyiceberg/table/maintenance.py +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import logging -from typing import TYPE_CHECKING - -logger = logging.getLogger(__name__) - - -if TYPE_CHECKING: - from pyiceberg.table import Table - from pyiceberg.table.update.snapshot import ExpireSnapshots - - -class MaintenanceTable: - tbl: Table - - def __init__(self, tbl: Table) -> None: - self.tbl = tbl - - def expire_snapshots(self) -> ExpireSnapshots: - """Return an ExpireSnapshots builder for snapshot expiration operations. - - Returns: - ExpireSnapshots builder for configuring and executing snapshot expiration. - """ - from pyiceberg.table import Transaction - from pyiceberg.table.update.snapshot import ExpireSnapshots - - return ExpireSnapshots(transaction=Transaction(self.tbl, autocommit=True)) diff --git a/pyiceberg/table/metadata.py b/pyiceberg/table/metadata.py index 9c2ae29cdd..d5ce76560c 100644 --- a/pyiceberg/table/metadata.py +++ b/pyiceberg/table/metadata.py @@ -36,7 +36,7 @@ SortOrder, assign_fresh_sort_order_ids, ) -from pyiceberg.table.statistics import PartitionStatisticsFile, StatisticsFile +from pyiceberg.table.statistics import StatisticsFile from pyiceberg.typedef import ( EMPTY_DICT, IcebergBaseModel, @@ -222,14 +222,6 @@ class TableMetadataCommonFields(IcebergBaseModel): table correctly. A table can contain many statistics files associated with different table snapshots.""" - partition_statistics: List[PartitionStatisticsFile] = Field(alias="partition-statistics", default_factory=list) - """A optional list of partition statistics files. - Partition statistics are not required for reading or planning - and readers may ignore them. Each table snapshot may be associated - with at most one partition statistics file. 
A writer can optionally - write the partition statistics file during each write operation, - or it can also be computed on demand.""" - # validators @field_validator("properties", mode="before") def transform_properties_dict_value_to_str(cls, properties: Properties) -> Dict[str, str]: @@ -557,6 +549,13 @@ def construct_refs(cls, table_metadata: TableMetadata) -> TableMetadata: """The table’s highest assigned sequence number, a monotonically increasing long that tracks the order of snapshots in a table.""" + row_lineage: bool = Field(alias="row-lineage", default=False) + """Indicates that row-lineage is enabled on the table + + For more information: + https://iceberg.apache.org/spec/?column-projection#row-lineage + """ + next_row_id: Optional[int] = Field(alias="next-row-id", default=None) """A long higher than all assigned row IDs; the next snapshot's `first-row-id`.""" @@ -579,11 +578,6 @@ def new_table_metadata( ) -> TableMetadata: from pyiceberg.table import TableProperties - # Remove format-version so it does not get persisted - format_version = int(properties.pop(TableProperties.FORMAT_VERSION, TableProperties.DEFAULT_FORMAT_VERSION)) - - schema.check_format_version_compatibility(format_version) - fresh_schema = assign_fresh_schema_ids(schema) fresh_partition_spec = assign_fresh_partition_spec_ids(partition_spec, schema, fresh_schema) fresh_sort_order = assign_fresh_sort_order_ids(sort_order, schema, fresh_schema) @@ -591,6 +585,8 @@ def new_table_metadata( if table_uuid is None: table_uuid = uuid.uuid4() + # Remove format-version so it does not get persisted + format_version = int(properties.pop(TableProperties.FORMAT_VERSION, TableProperties.DEFAULT_FORMAT_VERSION)) if format_version == 1: return TableMetadataV1( location=location, diff --git a/pyiceberg/table/puffin.py b/pyiceberg/table/puffin.py deleted file mode 100644 index a90ef7ee0d..0000000000 --- a/pyiceberg/table/puffin.py +++ /dev/null @@ -1,116 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
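On both sides of the new_table_metadata hunk above, the format-version entry is popped out of the caller-supplied properties before the metadata object is built, so it never lands in the persisted table properties. A minimal sketch of that pattern, with the property key and default written out literally (the real code reads them from TableProperties, and the default of 2 is an assumption here):

# Sketch: drop "format-version" from user properties and use it only to pick
# the metadata model; the remaining properties are persisted as-is.
def split_format_version(properties: dict[str, str]) -> tuple[int, dict[str, str]]:
    version = int(properties.pop("format-version", "2"))  # default of "2" is assumed
    return version, properties

version, props = split_format_version({"format-version": "1", "owner": "analytics"})
assert version == 1 and props == {"owner": "analytics"}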
-import math -from typing import TYPE_CHECKING, Dict, List, Literal, Optional - -from pydantic import Field -from pyroaring import BitMap, FrozenBitMap - -from pyiceberg.typedef import IcebergBaseModel - -if TYPE_CHECKING: - import pyarrow as pa - -# Short for: Puffin Fratercula arctica, version 1 -MAGIC_BYTES = b"PFA1" -EMPTY_BITMAP = FrozenBitMap() -MAX_JAVA_SIGNED = int(math.pow(2, 31)) - 1 -PROPERTY_REFERENCED_DATA_FILE = "referenced-data-file" - - -def _deserialize_bitmap(pl: bytes) -> List[BitMap]: - number_of_bitmaps = int.from_bytes(pl[0:8], byteorder="little") - pl = pl[8:] - - bitmaps = [] - last_key = -1 - for _ in range(number_of_bitmaps): - key = int.from_bytes(pl[0:4], byteorder="little") - if key < 0: - raise ValueError(f"Invalid unsigned key: {key}") - if key <= last_key: - raise ValueError("Keys must be sorted in ascending order") - if key > MAX_JAVA_SIGNED: - raise ValueError(f"Key {key} is too large, max {MAX_JAVA_SIGNED} to maintain compatibility with Java impl") - pl = pl[4:] - - while last_key < key - 1: - bitmaps.append(EMPTY_BITMAP) - last_key += 1 - - bm = BitMap().deserialize(pl) - # TODO: Optimize this - pl = pl[len(bm.serialize()) :] - bitmaps.append(bm) - - last_key = key - - return bitmaps - - -class PuffinBlobMetadata(IcebergBaseModel): - type: Literal["deletion-vector-v1"] = Field() - fields: List[int] = Field() - snapshot_id: int = Field(alias="snapshot-id") - sequence_number: int = Field(alias="sequence-number") - offset: int = Field() - length: int = Field() - compression_codec: Optional[str] = Field(alias="compression-codec", default=None) - properties: Dict[str, str] = Field(default_factory=dict) - - -class Footer(IcebergBaseModel): - blobs: List[PuffinBlobMetadata] = Field() - properties: Dict[str, str] = Field(default_factory=dict) - - -def _bitmaps_to_chunked_array(bitmaps: List[BitMap]) -> "pa.ChunkedArray": - import pyarrow as pa - - return pa.chunked_array([(key_pos << 32) + pos for pos in bitmap] for key_pos, bitmap in enumerate(bitmaps)) - - -class PuffinFile: - footer: Footer - _deletion_vectors: Dict[str, List[BitMap]] - - def __init__(self, puffin: bytes) -> None: - for magic_bytes in [puffin[:4], puffin[-4:]]: - if magic_bytes != MAGIC_BYTES: - raise ValueError(f"Incorrect magic bytes, expected {MAGIC_BYTES!r}, got {magic_bytes!r}") - - # One flag is set, the rest should be zero - # byte 0 (first) - # - bit 0 (lowest bit): whether FooterPayload is compressed - # - all other bits are reserved for future use and should be set to 0 on write - flags = puffin[-8:-4] - if flags[0] != 0: - raise ValueError("The Puffin-file has a compressed footer, which is not yet supported") - - # 4 byte integer is always signed, in a two's complement representation, stored little-endian. 
- footer_payload_size_int = int.from_bytes(puffin[-12:-8], byteorder="little") - - self.footer = Footer.model_validate_json(puffin[-(footer_payload_size_int + 12) : -12]) - puffin = puffin[8:] - - self._deletion_vectors = { - blob.properties[PROPERTY_REFERENCED_DATA_FILE]: _deserialize_bitmap(puffin[blob.offset : blob.offset + blob.length]) - for blob in self.footer.blobs - } - - def to_vector(self) -> Dict[str, "pa.ChunkedArray"]: - return {path: _bitmaps_to_chunked_array(bitmaps) for path, bitmaps in self._deletion_vectors.items()} diff --git a/pyiceberg/table/snapshots.py b/pyiceberg/table/snapshots.py index 60ad7219e1..a5515f12b0 100644 --- a/pyiceberg/table/snapshots.py +++ b/pyiceberg/table/snapshots.py @@ -28,7 +28,6 @@ from pyiceberg.manifest import DataFile, DataFileContent, ManifestFile, _manifests from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.utils.deprecated import deprecation_message if TYPE_CHECKING: from pyiceberg.table.metadata import TableMetadata @@ -58,7 +57,6 @@ TOTAL_FILE_SIZE = "total-files-size" CHANGED_PARTITION_COUNT_PROP = "changed-partition-count" CHANGED_PARTITION_PREFIX = "partitions." -PARTITION_SUMMARY_PROP = "partition-summaries-included" OPERATION = "operation" INITIAL_SEQUENCE_NUMBER = 0 @@ -273,10 +271,10 @@ class SnapshotSummaryCollector: partition_metrics: DefaultDict[str, UpdateMetrics] max_changed_partitions_for_summaries: int - def __init__(self, partition_summary_limit: int = 0) -> None: + def __init__(self) -> None: self.metrics = UpdateMetrics() self.partition_metrics = defaultdict(UpdateMetrics) - self.max_changed_partitions_for_summaries = partition_summary_limit + self.max_changed_partitions_for_summaries = 0 def set_partition_summary_limit(self, limit: int) -> None: self.max_changed_partitions_for_summaries = limit @@ -307,8 +305,6 @@ def build(self) -> Dict[str, str]: changed_partitions_size = len(self.partition_metrics) set_when_positive(properties, changed_partitions_size, CHANGED_PARTITION_COUNT_PROP) if changed_partitions_size <= self.max_changed_partitions_for_summaries: - if changed_partitions_size > 0: - properties[PARTITION_SUMMARY_PROP] = "true" for partition_path, update_metrics_partition in self.partition_metrics.items(): if (summary := self._partition_summary(update_metrics_partition)) and len(summary) != 0: properties[CHANGED_PARTITION_PREFIX + partition_path] = summary @@ -360,11 +356,6 @@ def update_snapshot_summaries( raise ValueError(f"Operation not implemented: {summary.operation}") if truncate_full_table and summary.operation == Operation.OVERWRITE and previous_summary is not None: - deprecation_message( - deprecated_in="0.10.0", - removed_in="0.11.0", - help_message="The truncate-full-table shouldn't be used.", - ) summary = _truncate_table_summary(summary, previous_summary) if not previous_summary: @@ -438,16 +429,3 @@ def ancestors_of(current_snapshot: Optional[Snapshot], table_metadata: TableMeta if snapshot.parent_snapshot_id is None: break snapshot = table_metadata.snapshot_by_id(snapshot.parent_snapshot_id) - - -def ancestors_between( - from_snapshot: Optional[Snapshot], to_snapshot: Snapshot, table_metadata: TableMetadata -) -> Iterable[Snapshot]: - """Get the ancestors of and including the given snapshot between the to and from snapshots.""" - if from_snapshot is not None: - for snapshot in ancestors_of(to_snapshot, table_metadata): - yield snapshot - if snapshot == from_snapshot: - break - else: - yield from 
ancestors_of(to_snapshot, table_metadata) diff --git a/pyiceberg/table/statistics.py b/pyiceberg/table/statistics.py index 484391efb1..151f5e961c 100644 --- a/pyiceberg/table/statistics.py +++ b/pyiceberg/table/statistics.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from typing import Dict, List, Literal, Optional, Union +from typing import Dict, List, Literal, Optional from pydantic import Field @@ -29,26 +29,17 @@ class BlobMetadata(IcebergBaseModel): properties: Optional[Dict[str, str]] = None -class StatisticsCommonFields(IcebergBaseModel): - """Common fields between table and partition statistics structs found on metadata.""" - +class StatisticsFile(IcebergBaseModel): snapshot_id: int = Field(alias="snapshot-id") statistics_path: str = Field(alias="statistics-path") file_size_in_bytes: int = Field(alias="file-size-in-bytes") - - -class StatisticsFile(StatisticsCommonFields): file_footer_size_in_bytes: int = Field(alias="file-footer-size-in-bytes") key_metadata: Optional[str] = Field(alias="key-metadata", default=None) blob_metadata: List[BlobMetadata] = Field(alias="blob-metadata") -class PartitionStatisticsFile(StatisticsCommonFields): - pass - - def filter_statistics_by_snapshot_id( - statistics: List[Union[StatisticsFile, PartitionStatisticsFile]], + statistics: List[StatisticsFile], reject_snapshot_id: int, -) -> List[Union[StatisticsFile, PartitionStatisticsFile]]: +) -> List[StatisticsFile]: return [stat for stat in statistics if stat.snapshot_id != reject_snapshot_id] diff --git a/pyiceberg/table/update/__init__.py b/pyiceberg/table/update/__init__.py index 3f7d43f0ef..f60ac1e3ee 100644 --- a/pyiceberg/table/update/__init__.py +++ b/pyiceberg/table/update/__init__.py @@ -29,18 +29,14 @@ from pyiceberg.partitioning import PARTITION_FIELD_ID_START, PartitionSpec from pyiceberg.schema import Schema from pyiceberg.table.metadata import SUPPORTED_TABLE_FORMAT_VERSION, TableMetadata, TableMetadataUtil -from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRef, SnapshotRefType +from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRef from pyiceberg.table.snapshots import ( MetadataLogEntry, Snapshot, SnapshotLogEntry, ) from pyiceberg.table.sorting import SortOrder -from pyiceberg.table.statistics import ( - PartitionStatisticsFile, - StatisticsFile, - filter_statistics_by_snapshot_id, -) +from pyiceberg.table.statistics import StatisticsFile, filter_statistics_by_snapshot_id from pyiceberg.typedef import ( IcebergBaseModel, Properties, @@ -143,7 +139,7 @@ class AddSnapshotUpdate(IcebergBaseModel): class SetSnapshotRefUpdate(IcebergBaseModel): action: Literal["set-snapshot-ref"] = Field(default="set-snapshot-ref") ref_name: str = Field(alias="ref-name") - type: Literal[SnapshotRefType.TAG, SnapshotRefType.BRANCH] + type: Literal["tag", "branch"] snapshot_id: int = Field(alias="snapshot-id") max_ref_age_ms: Annotated[Optional[int], Field(alias="max-ref-age-ms", default=None)] max_snapshot_age_ms: Annotated[Optional[int], Field(alias="max-snapshot-age-ms", default=None)] @@ -202,16 +198,6 @@ class RemoveStatisticsUpdate(IcebergBaseModel): snapshot_id: int = Field(alias="snapshot-id") -class SetPartitionStatisticsUpdate(IcebergBaseModel): - action: Literal["set-partition-statistics"] = Field(default="set-partition-statistics") - partition_statistics: PartitionStatisticsFile - - -class RemovePartitionStatisticsUpdate(IcebergBaseModel): - action: 
Literal["remove-partition-statistics"] = Field(default="remove-partition-statistics") - snapshot_id: int = Field(alias="snapshot-id") - - TableUpdate = Annotated[ Union[ AssignUUIDUpdate, @@ -231,8 +217,6 @@ class RemovePartitionStatisticsUpdate(IcebergBaseModel): RemovePropertiesUpdate, SetStatisticsUpdate, RemoveStatisticsUpdate, - SetPartitionStatisticsUpdate, - RemovePartitionStatisticsUpdate, ], Field(discriminator="action"), ] @@ -376,8 +360,7 @@ def _(update: SetCurrentSchemaUpdate, base_metadata: TableMetadata, context: _Ta @_apply_table_update.register(AddPartitionSpecUpdate) def _(update: AddPartitionSpecUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: for spec in base_metadata.partition_specs: - # Only raise in case of a discrepancy - if spec.spec_id == update.spec.spec_id and spec != update.spec: + if spec.spec_id == update.spec.spec_id: raise ValueError(f"Partition spec with id {spec.spec_id} already exists: {spec}") metadata_updates: Dict[str, Any] = { @@ -542,11 +525,6 @@ def _(update: RemoveSnapshotRefUpdate, base_metadata: TableMetadata, context: _T @_apply_table_update.register(AddSortOrderUpdate) def _(update: AddSortOrderUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: - for sort in base_metadata.sort_orders: - # Only raise in case of a discrepancy - if sort.order_id == update.sort_order.order_id and sort != update.sort_order: - raise ValueError(f"Sort-order with id {sort.order_id} already exists: {sort}") - context.add_update(update) return base_metadata.model_copy( update={ @@ -598,29 +576,6 @@ def _(update: RemoveStatisticsUpdate, base_metadata: TableMetadata, context: _Ta return base_metadata.model_copy(update={"statistics": statistics}) -@_apply_table_update.register(SetPartitionStatisticsUpdate) -def _(update: SetPartitionStatisticsUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: - partition_statistics = filter_statistics_by_snapshot_id( - base_metadata.partition_statistics, update.partition_statistics.snapshot_id - ) - context.add_update(update) - - return base_metadata.model_copy(update={"partition_statistics": partition_statistics + [update.partition_statistics]}) - - -@_apply_table_update.register(RemovePartitionStatisticsUpdate) -def _( - update: RemovePartitionStatisticsUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext -) -> TableMetadata: - if not any(part_stat.snapshot_id == update.snapshot_id for part_stat in base_metadata.partition_statistics): - raise ValueError(f"Partition Statistics with snapshot id {update.snapshot_id} does not exist") - - statistics = filter_statistics_by_snapshot_id(base_metadata.partition_statistics, update.snapshot_id) - context.add_update(update) - - return base_metadata.model_copy(update={"partition_statistics": statistics}) - - def update_table_metadata( base_metadata: TableMetadata, updates: Tuple[TableUpdate, ...], @@ -741,10 +696,6 @@ class AssertRefSnapshotId(ValidatableTableRequirement): def validate(self, base_metadata: Optional[TableMetadata]) -> None: if base_metadata is None: raise CommitFailedException("Requirement failed: current table metadata is missing") - elif len(base_metadata.snapshots) == 0 and self.ref != MAIN_BRANCH: - raise CommitFailedException( - f"Requirement failed: Table has no snapshots and can only be written to the {MAIN_BRANCH} BRANCH." 
- ) elif snapshot_ref := base_metadata.refs.get(self.ref): ref_type = snapshot_ref.snapshot_ref_type if self.snapshot_id is None: diff --git a/pyiceberg/table/update/schema.py b/pyiceberg/table/update/schema.py index 6ad01e97f2..8ee3b43c24 100644 --- a/pyiceberg/table/update/schema.py +++ b/pyiceberg/table/update/schema.py @@ -20,10 +20,9 @@ from copy import copy from dataclasses import dataclass from enum import Enum -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Union +from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, Union from pyiceberg.exceptions import ResolveError, ValidationError -from pyiceberg.expressions import literal # type: ignore from pyiceberg.schema import ( PartnerAccessor, Schema, @@ -48,7 +47,6 @@ UpdatesAndRequirements, UpdateTableMetadata, ) -from pyiceberg.typedef import L from pyiceberg.types import IcebergType, ListType, MapType, NestedField, PrimitiveType, StructType if TYPE_CHECKING: @@ -155,12 +153,7 @@ def union_by_name(self, new_schema: Union[Schema, "pa.Schema"]) -> UpdateSchema: return self def add_column( - self, - path: Union[str, Tuple[str, ...]], - field_type: IcebergType, - doc: Optional[str] = None, - required: bool = False, - default_value: Optional[L] = None, + self, path: Union[str, Tuple[str, ...]], field_type: IcebergType, doc: Optional[str] = None, required: bool = False ) -> UpdateSchema: """Add a new column to a nested struct or Add a new top-level column. @@ -175,7 +168,6 @@ def add_column( field_type: Type for the new column. doc: Documentation string for the new column. required: Whether the new column is required. - default_value: Default value for the new column. Returns: This for method chaining. @@ -185,6 +177,10 @@ def add_column( raise ValueError(f"Cannot add column with ambiguous name: {path}, provide a tuple instead") path = (path,) + if required and not self._allow_incompatible_changes: + # Table format version 1 and 2 cannot add required column because there is no initial value + raise ValueError(f"Incompatible change: cannot add required column: {'.'.join(path)}") + name = path[-1] parent = path[:-1] @@ -216,34 +212,13 @@ def add_column( # assign new IDs in order new_id = self.assign_new_column_id() - new_type = assign_fresh_schema_ids(field_type, self.assign_new_column_id) - - if default_value is not None: - try: - # To make sure that the value is valid for the type - initial_default = literal(default_value).to(new_type).value - except ValueError as e: - raise ValueError(f"Invalid default value: {e}") from e - else: - initial_default = default_value # type: ignore - - if (required and initial_default is None) and not self._allow_incompatible_changes: - # Table format version 1 and 2 cannot add required column because there is no initial value - raise ValueError(f"Incompatible change: cannot add required column: {'.'.join(path)}") # update tracking for moves self._added_name_to_id[full_name] = new_id self._id_to_parent[new_id] = parent_full_path - field = NestedField( - field_id=new_id, - name=name, - field_type=new_type, - required=required, - doc=doc, - initial_default=initial_default, - write_default=initial_default, - ) + new_type = assign_fresh_schema_ids(field_type, self.assign_new_column_id) + field = NestedField(field_id=new_id, name=name, field_type=new_type, required=required, doc=doc) if parent_id in self._adds: self._adds[parent_id].append(field) @@ -275,19 +250,6 @@ def delete_column(self, path: Union[str, Tuple[str, ...]]) -> UpdateSchema: return self - def 
set_default_value(self, path: Union[str, Tuple[str, ...]], default_value: Optional[L]) -> UpdateSchema: - """Set the default value of a column. - - Args: - path: The path to the column. - - Returns: - The UpdateSchema with the delete operation staged. - """ - self._set_column_default_value(path, default_value) - - return self - def rename_column(self, path_from: Union[str, Tuple[str, ...]], new_name: str) -> UpdateSchema: """Update the name of a column. @@ -311,8 +273,6 @@ def rename_column(self, path_from: Union[str, Tuple[str, ...]], new_name: str) - field_type=updated.field_type, doc=updated.doc, required=updated.required, - initial_default=updated.initial_default, - write_default=updated.write_default, ) else: self._updates[field_from.field_id] = NestedField( @@ -321,8 +281,6 @@ def rename_column(self, path_from: Union[str, Tuple[str, ...]], new_name: str) - field_type=field_from.field_type, doc=field_from.doc, required=field_from.required, - initial_default=field_from.initial_default, - write_default=field_from.write_default, ) # Lookup the field because of casing @@ -372,8 +330,6 @@ def _set_column_requirement(self, path: Union[str, Tuple[str, ...]], required: b field_type=updated.field_type, doc=updated.doc, required=required, - initial_default=updated.initial_default, - write_default=updated.write_default, ) else: self._updates[field.field_id] = NestedField( @@ -382,52 +338,6 @@ def _set_column_requirement(self, path: Union[str, Tuple[str, ...]], required: b field_type=field.field_type, doc=field.doc, required=required, - initial_default=field.initial_default, - write_default=field.write_default, - ) - - def _set_column_default_value(self, path: Union[str, Tuple[str, ...]], default_value: Any) -> None: - path = (path,) if isinstance(path, str) else path - name = ".".join(path) - - field = self._schema.find_field(name, self._case_sensitive) - - if default_value is not None: - try: - # To make sure that the value is valid for the type - default_value = literal(default_value).to(field.field_type).value - except ValueError as e: - raise ValueError(f"Invalid default value: {e}") from e - - if field.required and default_value == field.write_default: - # if the change is a noop, allow it even if allowIncompatibleChanges is false - return - - if not self._allow_incompatible_changes and field.required and default_value is None: - raise ValueError("Cannot change change default-value of a required column to None") - - if field.field_id in self._deletes: - raise ValueError(f"Cannot update a column that will be deleted: {name}") - - if updated := self._updates.get(field.field_id): - self._updates[field.field_id] = NestedField( - field_id=updated.field_id, - name=updated.name, - field_type=updated.field_type, - doc=updated.doc, - required=updated.required, - initial_default=updated.initial_default, - write_default=default_value, - ) - else: - self._updates[field.field_id] = NestedField( - field_id=field.field_id, - name=field.name, - field_type=field.field_type, - doc=field.doc, - required=field.required, - initial_default=field.initial_default, - write_default=default_value, ) def update_column( @@ -477,8 +387,6 @@ def update_column( field_type=field_type or updated.field_type, doc=doc if doc is not None else updated.doc, required=updated.required, - initial_default=updated.initial_default, - write_default=updated.write_default, ) else: self._updates[field.field_id] = NestedField( @@ -487,8 +395,6 @@ def update_column( field_type=field_type or field.field_type, doc=doc if doc is not None else 
field.doc, required=field.required, - initial_default=field.initial_default, - write_default=field.write_default, ) if required is not None: @@ -730,35 +636,19 @@ def struct(self, struct: StructType, field_results: List[Optional[IcebergType]]) name = field.name doc = field.doc required = field.required - write_default = field.write_default # There is an update if update := self._updates.get(field.field_id): name = update.name doc = update.doc required = update.required - write_default = update.write_default - - if ( - field.name == name - and field.field_type == result_type - and field.required == required - and field.doc == doc - and field.write_default == write_default - ): + + if field.name == name and field.field_type == result_type and field.required == required and field.doc == doc: new_fields.append(field) else: has_changes = True new_fields.append( - NestedField( - field_id=field.field_id, - name=name, - field_type=result_type, - required=required, - doc=doc, - initial_default=field.initial_default, - write_default=write_default, - ) + NestedField(field_id=field.field_id, name=name, field_type=result_type, required=required, doc=doc) ) if has_changes: diff --git a/pyiceberg/table/update/snapshot.py b/pyiceberg/table/update/snapshot.py index 018f6614c1..f21c501780 100644 --- a/pyiceberg/table/update/snapshot.py +++ b/pyiceberg/table/update/snapshot.py @@ -22,13 +22,11 @@ from abc import abstractmethod from collections import defaultdict from concurrent.futures import Future -from datetime import datetime from functools import cached_property from typing import TYPE_CHECKING, Callable, Dict, Generic, List, Optional, Set, Tuple from sortedcontainers import SortedList -from pyiceberg.avro.codecs import AvroCompressionCodec from pyiceberg.expressions import ( AlwaysFalse, BooleanExpression, @@ -57,7 +55,6 @@ from pyiceberg.partitioning import ( PartitionSpec, ) -from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRefType from pyiceberg.table.snapshots import ( Operation, Snapshot, @@ -69,7 +66,6 @@ AddSnapshotUpdate, AssertRefSnapshotId, RemoveSnapshotRefUpdate, - RemoveSnapshotsUpdate, SetSnapshotRefUpdate, TableRequirement, TableUpdate, @@ -83,7 +79,6 @@ ) from pyiceberg.utils.bin_packing import ListPacker from pyiceberg.utils.concurrent import ExecutorFactory -from pyiceberg.utils.datetime import datetime_to_millis from pyiceberg.utils.properties import property_as_bool, property_as_int if TYPE_CHECKING: @@ -109,8 +104,6 @@ class _SnapshotProducer(UpdateTableMetadata[U], Generic[U]): _added_data_files: List[DataFile] _manifest_num_counter: itertools.count[int] _deleted_data_files: Set[DataFile] - _compression: AvroCompressionCodec - _target_branch = MAIN_BRANCH def __init__( self, @@ -119,36 +112,20 @@ def __init__( io: FileIO, commit_uuid: Optional[uuid.UUID] = None, snapshot_properties: Dict[str, str] = EMPTY_DICT, - branch: str = MAIN_BRANCH, ) -> None: super().__init__(transaction) self.commit_uuid = commit_uuid or uuid.uuid4() self._io = io self._operation = operation self._snapshot_id = self._transaction.table_metadata.new_snapshot_id() + # Since we only support the main branch for now + self._parent_snapshot_id = ( + snapshot.snapshot_id if (snapshot := self._transaction.table_metadata.current_snapshot()) else None + ) self._added_data_files = [] self._deleted_data_files = set() self.snapshot_properties = snapshot_properties self._manifest_num_counter = itertools.count(0) - from pyiceberg.table import TableProperties - - self._compression = 
self._transaction.table_metadata.properties.get( # type: ignore - TableProperties.WRITE_AVRO_COMPRESSION, TableProperties.WRITE_AVRO_COMPRESSION_DEFAULT - ) - self._target_branch = self._validate_target_branch(branch=branch) - self._parent_snapshot_id = ( - snapshot.snapshot_id if (snapshot := self._transaction.table_metadata.snapshot_by_name(self._target_branch)) else None - ) - - def _validate_target_branch(self, branch: str) -> str: - # Default is already set to MAIN_BRANCH. So branch name can't be None. - if branch is None: - raise ValueError("Invalid branch name: null") - if branch in self._transaction.table_metadata.refs: - ref = self._transaction.table_metadata.refs[branch] - if ref.snapshot_ref_type != SnapshotRefType.BRANCH: - raise ValueError(f"{branch} is a tag, not a branch. Tags cannot be targets for producing snapshots") - return branch def append_data_file(self, data_file: DataFile) -> _SnapshotProducer[U]: self._added_data_files.append(data_file) @@ -177,11 +154,10 @@ def _write_added_manifest() -> List[ManifestFile]: schema=self._transaction.table_metadata.schema(), output_file=self.new_manifest_output(), snapshot_id=self._snapshot_id, - avro_compression=self._compression, ) as writer: for data_file in self._added_data_files: writer.add( - ManifestEntry.from_args( + ManifestEntry( status=ManifestEntryStatus.ADDED, snapshot_id=self._snapshot_id, sequence_number=None, @@ -208,7 +184,6 @@ def _write_delete_manifest() -> List[ManifestFile]: schema=self._transaction.table_metadata.schema(), output_file=self.new_manifest_output(), snapshot_id=self._snapshot_id, - avro_compression=self._compression, ) as writer: for entry in entries: writer.add_entry(entry) @@ -228,12 +203,13 @@ def _write_delete_manifest() -> List[ManifestFile]: def _summary(self, snapshot_properties: Dict[str, str] = EMPTY_DICT) -> Summary: from pyiceberg.table import TableProperties + ssc = SnapshotSummaryCollector() partition_summary_limit = int( self._transaction.table_metadata.properties.get( TableProperties.WRITE_PARTITION_SUMMARY_LIMIT, TableProperties.WRITE_PARTITION_SUMMARY_LIMIT_DEFAULT ) ) - ssc = SnapshotSummaryCollector(partition_summary_limit=partition_summary_limit) + ssc.set_partition_summary_limit(partition_summary_limit) for data_file in self._added_data_files: ssc.add_file( @@ -260,6 +236,7 @@ def _summary(self, snapshot_properties: Dict[str, str] = EMPTY_DICT) -> Summary: return update_snapshot_summaries( summary=Summary(operation=self._operation, **ssc.build(), **snapshot_properties), previous_summary=previous_snapshot.summary if previous_snapshot is not None else None, + truncate_full_table=self._operation == Operation.OVERWRITE, ) def _commit(self) -> UpdatesAndRequirements: @@ -274,14 +251,12 @@ def _commit(self) -> UpdatesAndRequirements: ) location_provider = self._transaction._table.location_provider() manifest_list_file_path = location_provider.new_metadata_location(file_name) - with write_manifest_list( format_version=self._transaction.table_metadata.format_version, output_file=self._io.new_output(manifest_list_file_path), snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, sequence_number=next_sequence_number, - avro_compression=self._compression, ) as writer: writer.add_manifests(new_manifests) @@ -298,20 +273,10 @@ def _commit(self) -> UpdatesAndRequirements: ( AddSnapshotUpdate(snapshot=snapshot), SetSnapshotRefUpdate( - snapshot_id=self._snapshot_id, - parent_snapshot_id=self._parent_snapshot_id, - ref_name=self._target_branch, - 
type=SnapshotRefType.BRANCH, - ), - ), - ( - AssertRefSnapshotId( - snapshot_id=self._transaction.table_metadata.refs[self._target_branch].snapshot_id - if self._target_branch in self._transaction.table_metadata.refs - else None, - ref=self._target_branch, + snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, ref_name="main", type="branch" ), ), + (AssertRefSnapshotId(snapshot_id=self._transaction.table_metadata.current_snapshot_id, ref="main"),), ) @property @@ -328,7 +293,6 @@ def new_manifest_writer(self, spec: PartitionSpec) -> ManifestWriter: schema=self._transaction.table_metadata.schema(), output_file=self.new_manifest_output(), snapshot_id=self._snapshot_id, - avro_compression=self._compression, ) def new_manifest_output(self) -> OutputFile: @@ -359,11 +323,10 @@ def __init__( operation: Operation, transaction: Transaction, io: FileIO, - branch: str, commit_uuid: Optional[uuid.UUID] = None, snapshot_properties: Dict[str, str] = EMPTY_DICT, ): - super().__init__(operation, transaction, io, commit_uuid, snapshot_properties, branch) + super().__init__(operation, transaction, io, commit_uuid, snapshot_properties) self._predicate = AlwaysFalse() self._case_sensitive = True @@ -405,7 +368,7 @@ def _compute_deletes(self) -> Tuple[List[ManifestFile], List[ManifestEntry], boo schema = self._transaction.table_metadata.schema() def _copy_with_new_status(entry: ManifestEntry, status: ManifestEntryStatus) -> ManifestEntry: - return ManifestEntry.from_args( + return ManifestEntry( status=status, snapshot_id=entry.snapshot_id, sequence_number=entry.sequence_number, @@ -423,53 +386,46 @@ def _copy_with_new_status(entry: ManifestEntry, status: ManifestEntryStatus) -> total_deleted_entries = [] partial_rewrites_needed = False self._deleted_data_files = set() - - # Determine the snapshot to read manifests from for deletion - # Should be the current tip of the _target_branch - parent_snapshot_id_for_delete_source = self._parent_snapshot_id - if parent_snapshot_id_for_delete_source is not None: - snapshot = self._transaction.table_metadata.snapshot_by_id(parent_snapshot_id_for_delete_source) - if snapshot: # Ensure snapshot is found - for manifest_file in snapshot.manifests(io=self._io): - if manifest_file.content == ManifestContent.DATA: - if not manifest_evaluators[manifest_file.partition_spec_id](manifest_file): - # If the manifest isn't relevant, we can just keep it in the manifest-list - existing_manifests.append(manifest_file) - else: - # It is relevant, let's check out the content - deleted_entries = [] - existing_entries = [] - for entry in manifest_file.fetch_manifest_entry(io=self._io, discard_deleted=True): - if strict_metrics_evaluator(entry.data_file) == ROWS_MUST_MATCH: - # Based on the metadata, it can be dropped right away - deleted_entries.append(_copy_with_new_status(entry, ManifestEntryStatus.DELETED)) - self._deleted_data_files.add(entry.data_file) - else: - # Based on the metadata, we cannot determine if it can be deleted - existing_entries.append(_copy_with_new_status(entry, ManifestEntryStatus.EXISTING)) - if inclusive_metrics_evaluator(entry.data_file) != ROWS_MIGHT_NOT_MATCH: - partial_rewrites_needed = True - - if len(deleted_entries) > 0: - total_deleted_entries += deleted_entries - - # Rewrite the manifest - if len(existing_entries) > 0: - with write_manifest( - format_version=self._transaction.table_metadata.format_version, - spec=self._transaction.table_metadata.specs()[manifest_file.partition_spec_id], - 
schema=self._transaction.table_metadata.schema(), - output_file=self.new_manifest_output(), - snapshot_id=self._snapshot_id, - avro_compression=self._compression, - ) as writer: - for existing_entry in existing_entries: - writer.add_entry(existing_entry) - existing_manifests.append(writer.to_manifest_file()) - else: - existing_manifests.append(manifest_file) - else: + if snapshot := self._transaction.table_metadata.current_snapshot(): + for manifest_file in snapshot.manifests(io=self._io): + if manifest_file.content == ManifestContent.DATA: + if not manifest_evaluators[manifest_file.partition_spec_id](manifest_file): + # If the manifest isn't relevant, we can just keep it in the manifest-list existing_manifests.append(manifest_file) + else: + # It is relevant, let's check out the content + deleted_entries = [] + existing_entries = [] + for entry in manifest_file.fetch_manifest_entry(io=self._io, discard_deleted=True): + if strict_metrics_evaluator(entry.data_file) == ROWS_MUST_MATCH: + # Based on the metadata, it can be dropped right away + deleted_entries.append(_copy_with_new_status(entry, ManifestEntryStatus.DELETED)) + self._deleted_data_files.add(entry.data_file) + else: + # Based on the metadata, we cannot determine if it can be deleted + existing_entries.append(_copy_with_new_status(entry, ManifestEntryStatus.EXISTING)) + if inclusive_metrics_evaluator(entry.data_file) != ROWS_MIGHT_NOT_MATCH: + partial_rewrites_needed = True + + if len(deleted_entries) > 0: + total_deleted_entries += deleted_entries + + # Rewrite the manifest + if len(existing_entries) > 0: + with write_manifest( + format_version=self._transaction.table_metadata.format_version, + spec=self._transaction.table_metadata.specs()[manifest_file.partition_spec_id], + schema=self._transaction.table_metadata.schema(), + output_file=self.new_manifest_output(), + snapshot_id=self._snapshot_id, + ) as writer: + for existing_entry in existing_entries: + writer.add_entry(existing_entry) + existing_manifests.append(writer.to_manifest_file()) + else: + existing_manifests.append(manifest_file) + else: + existing_manifests.append(manifest_file) return existing_manifests, total_deleted_entries, partial_rewrites_needed @@ -529,13 +485,12 @@ def __init__( operation: Operation, transaction: Transaction, io: FileIO, - branch: str, commit_uuid: Optional[uuid.UUID] = None, snapshot_properties: Dict[str, str] = EMPTY_DICT, ) -> None: from pyiceberg.table import TableProperties - super().__init__(operation, transaction, io, commit_uuid, snapshot_properties, branch) + super().__init__(operation, transaction, io, commit_uuid, snapshot_properties) self._target_size_bytes = property_as_int( self._transaction.table_metadata.properties, TableProperties.MANIFEST_TARGET_SIZE_BYTES, @@ -581,7 +536,7 @@ def _existing_manifests(self) -> List[ManifestFile]: """Determine if there are any existing manifest files.""" existing_files = [] - if snapshot := self._transaction.table_metadata.snapshot_by_name(name=self._target_branch): + if snapshot := self._transaction.table_metadata.current_snapshot(): for manifest_file in snapshot.manifests(io=self._io): entries = manifest_file.fetch_manifest_entry(io=self._io, discard_deleted=True) found_deleted_data_files = [entry.data_file for entry in entries if entry.data_file in self._deleted_data_files] @@ -597,19 +552,20 @@ def _existing_manifests(self) -> List[ManifestFile]: schema=self._transaction.table_metadata.schema(), output_file=self.new_manifest_output(), snapshot_id=self._snapshot_id, - 
avro_compression=self._compression, ) as writer: - for entry in entries: - if entry.data_file not in found_deleted_data_files: - writer.add_entry( - ManifestEntry.from_args( - status=ManifestEntryStatus.EXISTING, - snapshot_id=entry.snapshot_id, - sequence_number=entry.sequence_number, - file_sequence_number=entry.file_sequence_number, - data_file=entry.data_file, - ) + [ + writer.add_entry( + ManifestEntry( + status=ManifestEntryStatus.EXISTING, + snapshot_id=entry.snapshot_id, + sequence_number=entry.sequence_number, + file_sequence_number=entry.file_sequence_number, + data_file=entry.data_file, ) + ) + for entry in entries + if entry.data_file not in found_deleted_data_files + ] existing_files.append(writer.to_manifest_file()) return existing_files @@ -630,7 +586,7 @@ def _deleted_entries(self) -> List[ManifestEntry]: def _get_entries(manifest: ManifestFile) -> List[ManifestEntry]: return [ - ManifestEntry.from_args( + ManifestEntry( status=ManifestEntryStatus.DELETED, snapshot_id=entry.snapshot_id, sequence_number=entry.sequence_number, @@ -650,48 +606,31 @@ def _get_entries(manifest: ManifestFile) -> List[ManifestEntry]: class UpdateSnapshot: _transaction: Transaction _io: FileIO - _branch: str _snapshot_properties: Dict[str, str] - def __init__( - self, - transaction: Transaction, - io: FileIO, - branch: str, - snapshot_properties: Dict[str, str] = EMPTY_DICT, - ) -> None: + def __init__(self, transaction: Transaction, io: FileIO, snapshot_properties: Dict[str, str] = EMPTY_DICT) -> None: self._transaction = transaction self._io = io self._snapshot_properties = snapshot_properties - self._branch = branch def fast_append(self) -> _FastAppendFiles: return _FastAppendFiles( - operation=Operation.APPEND, - transaction=self._transaction, - io=self._io, - branch=self._branch, - snapshot_properties=self._snapshot_properties, + operation=Operation.APPEND, transaction=self._transaction, io=self._io, snapshot_properties=self._snapshot_properties ) def merge_append(self) -> _MergeAppendFiles: return _MergeAppendFiles( - operation=Operation.APPEND, - transaction=self._transaction, - io=self._io, - branch=self._branch, - snapshot_properties=self._snapshot_properties, + operation=Operation.APPEND, transaction=self._transaction, io=self._io, snapshot_properties=self._snapshot_properties ) def overwrite(self, commit_uuid: Optional[uuid.UUID] = None) -> _OverwriteFiles: return _OverwriteFiles( commit_uuid=commit_uuid, operation=Operation.OVERWRITE - if self._transaction.table_metadata.snapshot_by_name(name=self._branch) is not None + if self._transaction.table_metadata.current_snapshot() is not None else Operation.APPEND, transaction=self._transaction, io=self._io, - branch=self._branch, snapshot_properties=self._snapshot_properties, ) @@ -700,7 +639,6 @@ def delete(self) -> _DeleteFiles: operation=Operation.DELETE, transaction=self._transaction, io=self._io, - branch=self._branch, snapshot_properties=self._snapshot_properties, ) @@ -880,7 +818,7 @@ def create_branch( branch_name (str): name of the new branch max_ref_age_ms (Optional[int]): max ref age in milliseconds max_snapshot_age_ms (Optional[int]): max age of snapshots to keep in milliseconds - min_snapshots_to_keep (Optional[int]): min number of snapshots to keep for the branch + min_snapshots_to_keep (Optional[int]): min number of snapshots to keep in milliseconds Returns: This for method chaining """ @@ -906,101 +844,3 @@ def remove_branch(self, branch_name: str) -> ManageSnapshots: This for method chaining """ return 
self._remove_ref_snapshot(ref_name=branch_name) - - -class ExpireSnapshots(UpdateTableMetadata["ExpireSnapshots"]): - """Expire snapshots by ID. - - Use table.expire_snapshots().().commit() to run a specific operation. - Use table.expire_snapshots().().().commit() to run multiple operations. - Pending changes are applied on commit. - """ - - _snapshot_ids_to_expire: Set[int] = set() - _updates: Tuple[TableUpdate, ...] = () - _requirements: Tuple[TableRequirement, ...] = () - - def _commit(self) -> UpdatesAndRequirements: - """ - Commit the staged updates and requirements. - - This will remove the snapshots with the given IDs, but will always skip protected snapshots (branch/tag heads). - - Returns: - Tuple of updates and requirements to be committed, - as required by the calling parent apply functions. - """ - # Remove any protected snapshot IDs from the set to expire, just in case - protected_ids = self._get_protected_snapshot_ids() - self._snapshot_ids_to_expire -= protected_ids - update = RemoveSnapshotsUpdate(snapshot_ids=self._snapshot_ids_to_expire) - self._updates += (update,) - return self._updates, self._requirements - - def _get_protected_snapshot_ids(self) -> Set[int]: - """ - Get the IDs of protected snapshots. - - These are the HEAD snapshots of all branches and all tagged snapshots. These ids are to be excluded from expiration. - - Returns: - Set of protected snapshot IDs to exclude from expiration. - """ - return { - ref.snapshot_id - for ref in self._transaction.table_metadata.refs.values() - if ref.snapshot_ref_type in [SnapshotRefType.TAG, SnapshotRefType.BRANCH] - } - - def by_id(self, snapshot_id: int) -> ExpireSnapshots: - """ - Expire a snapshot by its ID. - - This will mark the snapshot for expiration. - - Args: - snapshot_id (int): The ID of the snapshot to expire. - Returns: - This for method chaining. - """ - if self._transaction.table_metadata.snapshot_by_id(snapshot_id) is None: - raise ValueError(f"Snapshot with ID {snapshot_id} does not exist.") - - if snapshot_id in self._get_protected_snapshot_ids(): - raise ValueError(f"Snapshot with ID {snapshot_id} is protected and cannot be expired.") - - self._snapshot_ids_to_expire.add(snapshot_id) - - return self - - def by_ids(self, snapshot_ids: List[int]) -> "ExpireSnapshots": - """ - Expire multiple snapshots by their IDs. - - This will mark the snapshots for expiration. - - Args: - snapshot_ids (List[int]): List of snapshot IDs to expire. - Returns: - This for method chaining. - """ - for snapshot_id in snapshot_ids: - self.by_id(snapshot_id) - return self - - def older_than(self, dt: datetime) -> "ExpireSnapshots": - """ - Expire all unprotected snapshots with a timestamp older than a given value. - - Args: - dt (datetime): Only snapshots with datetime < this value will be expired. - - Returns: - This for method chaining. - """ - protected_ids = self._get_protected_snapshot_ids() - expire_from = datetime_to_millis(dt) - for snapshot in self._transaction.table_metadata.snapshots: - if snapshot.timestamp_ms < expire_from and snapshot.snapshot_id not in protected_ids: - self._snapshot_ids_to_expire.add(snapshot.snapshot_id) - return self diff --git a/pyiceberg/table/update/spec.py b/pyiceberg/table/update/spec.py index 1f91aa5d17..b732b2116e 100644 --- a/pyiceberg/table/update/spec.py +++ b/pyiceberg/table/update/spec.py @@ -16,7 +16,15 @@ # under the License. 
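For reference, a minimal usage sketch of the ExpireSnapshots builder deleted above. It assumes a Table loaded from a configured catalog and that the builder is reached through the MaintenanceTable accessor removed earlier in this patch (spelled here as table.maintenance); the table identifier and snapshot id are placeholders:

from datetime import datetime, timezone

from pyiceberg.catalog import load_catalog

catalog = load_catalog("default")               # assumes a configured catalog named "default"
table = catalog.load_table("examples.events")   # placeholder identifier

# Expire every unprotected snapshot older than the cutoff, then one specific id.
cutoff = datetime(2024, 1, 1, tzinfo=timezone.utc)
table.maintenance.expire_snapshots().older_than(cutoff).commit()
table.maintenance.expire_snapshots().by_id(1234567890123456789).commit()  # placeholder id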
from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Union +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Optional, + Set, + Tuple, +) from pyiceberg.expressions import ( Reference, @@ -39,7 +47,7 @@ UpdatesAndRequirements, UpdateTableMetadata, ) -from pyiceberg.transforms import IdentityTransform, TimeTransform, Transform, VoidTransform, parse_transform +from pyiceberg.transforms import IdentityTransform, TimeTransform, Transform, VoidTransform if TYPE_CHECKING: from pyiceberg.table import Transaction @@ -77,13 +85,11 @@ def __init__(self, transaction: Transaction, case_sensitive: bool = True) -> Non def add_field( self, source_column_name: str, - transform: Union[str, Transform[Any, Any]], + transform: Transform[Any, Any], partition_field_name: Optional[str] = None, ) -> UpdateSpec: ref = Reference(source_column_name) bound_ref = ref.bind(self._transaction.table_metadata.schema(), self._case_sensitive) - if isinstance(transform, str): - transform = parse_transform(transform) # verify transform can actually bind it output_type = bound_ref.field.field_type if not transform.can_transform(output_type): diff --git a/pyiceberg/table/update/validate.py b/pyiceberg/table/update/validate.py deleted file mode 100644 index b49c4abe07..0000000000 --- a/pyiceberg/table/update/validate.py +++ /dev/null @@ -1,237 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from typing import Iterator, Optional, Set - -from pyiceberg.exceptions import ValidationException -from pyiceberg.expressions import BooleanExpression -from pyiceberg.expressions.visitors import ROWS_CANNOT_MATCH, _InclusiveMetricsEvaluator -from pyiceberg.manifest import ManifestContent, ManifestEntry, ManifestEntryStatus, ManifestFile -from pyiceberg.schema import Schema -from pyiceberg.table import Table -from pyiceberg.table.snapshots import Operation, Snapshot, ancestors_between -from pyiceberg.typedef import Record - -VALIDATE_DATA_FILES_EXIST_OPERATIONS: Set[Operation] = {Operation.OVERWRITE, Operation.REPLACE, Operation.DELETE} -VALIDATE_ADDED_DATA_FILES_OPERATIONS: Set[Operation] = {Operation.APPEND, Operation.OVERWRITE} - - -def _validation_history( - table: Table, - from_snapshot: Snapshot, - to_snapshot: Snapshot, - matching_operations: set[Operation], - manifest_content_filter: ManifestContent, -) -> tuple[list[ManifestFile], set[int]]: - """Return newly added manifests and snapshot IDs between the starting snapshot and parent snapshot. 
- - Args: - table: Table to get the history from - from_snapshot: Parent snapshot to get the history from - to_snapshot: Starting snapshot - matching_operations: Operations to match on - manifest_content_filter: Manifest content type to filter - - Raises: - ValidationException: If no matching snapshot is found or only one snapshot is found - - Returns: - List of manifest files and set of snapshots ID's matching conditions - """ - manifests_files: list[ManifestFile] = [] - snapshots: set[int] = set() - - last_snapshot = None - for snapshot in ancestors_between(from_snapshot, to_snapshot, table.metadata): - last_snapshot = snapshot - summary = snapshot.summary - if summary is None: - raise ValidationException(f"No summary found for snapshot {snapshot}!") - if summary.operation not in matching_operations: - continue - - snapshots.add(snapshot.snapshot_id) - # TODO: Maybe do the IO in a separate thread at some point, and collect at the bottom (we can easily merge the sets - manifests_files.extend( - [ - manifest - for manifest in snapshot.manifests(table.io) - if manifest.added_snapshot_id == snapshot.snapshot_id and manifest.content == manifest_content_filter - ] - ) - - if last_snapshot is not None and last_snapshot.snapshot_id != from_snapshot.snapshot_id: - raise ValidationException("No matching snapshot found.") - - return manifests_files, snapshots - - -def _filter_manifest_entries( - entry: ManifestEntry, - snapshot_ids: set[int], - data_filter: Optional[BooleanExpression], - partition_set: Optional[dict[int, set[Record]]], - entry_status: Optional[ManifestEntryStatus], - schema: Schema, -) -> bool: - """Filter manifest entries based on data filter and partition set. - - Args: - entry: Manifest entry to filter - snapshot_ids: set of snapshot ids to match data files - data_filter: Optional filter to match data files - partition_set: Optional set of partitions to match data files - entry_status: Optional status to match data files - schema: schema for filtering - - Returns: - True if the entry should be included, False otherwise - """ - if entry.snapshot_id not in snapshot_ids: - return False - - if entry_status is not None and entry.status != entry_status: - return False - - if data_filter is not None: - evaluator = _InclusiveMetricsEvaluator(schema, data_filter) - if evaluator.eval(entry.data_file) is ROWS_CANNOT_MATCH: - return False - - if partition_set is not None: - partition = entry.data_file.partition - spec_id = entry.data_file.spec_id - if spec_id not in partition_set or partition not in partition_set[spec_id]: - return False - - return True - - -def _deleted_data_files( - table: Table, - starting_snapshot: Snapshot, - data_filter: Optional[BooleanExpression], - partition_set: Optional[dict[int, set[Record]]], - parent_snapshot: Optional[Snapshot], -) -> Iterator[ManifestEntry]: - """Find deleted data files matching a filter since a starting snapshot. 
- - Args: - table: Table to validate - starting_snapshot: Snapshot current at the start of the operation - data_filter: Expression used to find deleted data files - partition_set: dict of {spec_id: set[partition]} to filter on - parent_snapshot: Ending snapshot on the branch being validated - - Returns: - List of conflicting manifest-entries - """ - # if there is no current table state, no files have been deleted - if parent_snapshot is None: - return - - manifests, snapshot_ids = _validation_history( - table, - parent_snapshot, - starting_snapshot, - VALIDATE_DATA_FILES_EXIST_OPERATIONS, - ManifestContent.DATA, - ) - - for manifest in manifests: - for entry in manifest.fetch_manifest_entry(table.io, discard_deleted=False): - if _filter_manifest_entries( - entry, snapshot_ids, data_filter, partition_set, ManifestEntryStatus.DELETED, table.schema() - ): - yield entry - - -def _validate_deleted_data_files( - table: Table, - starting_snapshot: Snapshot, - data_filter: Optional[BooleanExpression], - parent_snapshot: Snapshot, -) -> None: - """Validate that no files matching a filter have been deleted from the table since a starting snapshot. - - Args: - table: Table to validate - starting_snapshot: Snapshot current at the start of the operation - data_filter: Expression used to find deleted data files - parent_snapshot: Ending snapshot on the branch being validated - - """ - conflicting_entries = _deleted_data_files(table, starting_snapshot, data_filter, None, parent_snapshot) - if any(conflicting_entries): - conflicting_snapshots = {entry.snapshot_id for entry in conflicting_entries} - raise ValidationException(f"Deleted data files were found matching the filter for snapshots {conflicting_snapshots}!") - - -def _added_data_files( - table: Table, - starting_snapshot: Snapshot, - data_filter: Optional[BooleanExpression], - partition_set: Optional[dict[int, set[Record]]], - parent_snapshot: Optional[Snapshot], -) -> Iterator[ManifestEntry]: - """Return manifest entries for data files added between the starting snapshot and parent snapshot. - - Args: - table: Table to get the history from - starting_snapshot: Starting snapshot to get the history from - data_filter: Optional filter to match data files - partition_set: Optional set of partitions to match data files - parent_snapshot: Parent snapshot to get the history from - - Returns: - Iterator of manifest entries for added data files matching the conditions - """ - if parent_snapshot is None: - return - - manifests, snapshot_ids = _validation_history( - table, - parent_snapshot, - starting_snapshot, - VALIDATE_ADDED_DATA_FILES_OPERATIONS, - ManifestContent.DATA, - ) - - for manifest in manifests: - for entry in manifest.fetch_manifest_entry(table.io): - if _filter_manifest_entries(entry, snapshot_ids, data_filter, partition_set, None, table.schema()): - yield entry - - -def _validate_added_data_files( - table: Table, - starting_snapshot: Snapshot, - data_filter: Optional[BooleanExpression], - parent_snapshot: Optional[Snapshot], -) -> None: - """Validate that no files matching a filter have been added to the table since a starting snapshot. 
- - Args: - table: Table to validate - starting_snapshot: Snapshot current at the start of the operation - data_filter: Expression used to find added data files - parent_snapshot: Ending snapshot on the branch being validated - - """ - conflicting_entries = _added_data_files(table, starting_snapshot, data_filter, None, parent_snapshot) - if any(conflicting_entries): - conflicting_snapshots = {entry.snapshot_id for entry in conflicting_entries if entry.snapshot_id is not None} - raise ValidationException(f"Added data files were found matching the filter for snapshots {conflicting_snapshots}!") diff --git a/pyiceberg/table/upsert_util.py b/pyiceberg/table/upsert_util.py index cefdd101a0..723a89aa20 100644 --- a/pyiceberg/table/upsert_util.py +++ b/pyiceberg/table/upsert_util.py @@ -22,7 +22,7 @@ from pyarrow import compute as pc from pyiceberg.expressions import ( - AlwaysFalse, + And, BooleanExpression, EqualTo, In, @@ -36,16 +36,7 @@ def create_match_filter(df: pyarrow_table, join_cols: list[str]) -> BooleanExpre if len(join_cols) == 1: return In(join_cols[0], unique_keys[0].to_pylist()) else: - filters = [ - functools.reduce(operator.and_, [EqualTo(col, row[col]) for col in join_cols]) for row in unique_keys.to_pylist() - ] - - if len(filters) == 0: - return AlwaysFalse() - elif len(filters) == 1: - return filters[0] - else: - return Or(*filters) + return Or(*[And(*[EqualTo(col, row[col]) for col in join_cols]) for row in unique_keys.to_pylist()]) def has_duplicate_rows(df: pyarrow_table, join_cols: list[str]) -> bool: @@ -57,66 +48,47 @@ def get_rows_to_update(source_table: pa.Table, target_table: pa.Table, join_cols """ Return a table with rows that need to be updated in the target table based on the join columns. - The table is joined on the identifier columns, and then checked if there are any updated rows. - Those are selected and everything is renamed correctly. + When a row is matched, an additional scan is done to evaluate the non-key columns to detect if an actual change has occurred. + Only matched rows that have an actual change to a non-key column value will be returned in the final output. """ all_columns = set(source_table.column_names) join_cols_set = set(join_cols) non_key_cols = list(all_columns - join_cols_set) - if has_duplicate_rows(target_table, join_cols): - raise ValueError("Target table has duplicate rows, aborting upsert") - - if len(target_table) == 0: - # When the target table is empty, there is nothing to update :) - return source_table.schema.empty_table() - - # We need to compare non_key_cols in Python as PyArrow - # 1. Cannot do a join when non-join columns have complex types - # 2. 
Cannot compare columns with complex types - # See: https://github.com/apache/arrow/issues/35785 - SOURCE_INDEX_COLUMN_NAME = "__source_index" - TARGET_INDEX_COLUMN_NAME = "__target_index" - - if SOURCE_INDEX_COLUMN_NAME in join_cols or TARGET_INDEX_COLUMN_NAME in join_cols: - raise ValueError( - f"{SOURCE_INDEX_COLUMN_NAME} and {TARGET_INDEX_COLUMN_NAME} are reserved for joining " - f"DataFrames, and cannot be used as column names" - ) from None - - # Step 1: Prepare source index with join keys and a marker index - # Cast to target table schema, so we can do the join - # See: https://github.com/apache/arrow/issues/37542 - source_index = ( - source_table.cast(target_table.schema) - .select(join_cols_set) - .append_column(SOURCE_INDEX_COLUMN_NAME, pa.array(range(len(source_table)))) - ) - - # Step 2: Prepare target index with join keys and a marker - target_index = target_table.select(join_cols_set).append_column(TARGET_INDEX_COLUMN_NAME, pa.array(range(len(target_table)))) - - # Step 3: Perform an inner join to find which rows from source exist in target - matching_indices = source_index.join(target_index, keys=list(join_cols_set), join_type="inner") - - # Step 4: Compare all rows using Python - to_update_indices = [] - for source_idx, target_idx in zip( - matching_indices[SOURCE_INDEX_COLUMN_NAME].to_pylist(), matching_indices[TARGET_INDEX_COLUMN_NAME].to_pylist() - ): - source_row = source_table.slice(source_idx, 1) - target_row = target_table.slice(target_idx, 1) - - for key in non_key_cols: - source_val = source_row.column(key)[0].as_py() - target_val = target_row.column(key)[0].as_py() - if source_val != target_val: - to_update_indices.append(source_idx) - break - - # Step 5: Take rows from source table using the indices and cast to target schema - if to_update_indices: - return source_table.take(to_update_indices) + match_expr = functools.reduce(operator.and_, [pc.field(col).isin(target_table.column(col).to_pylist()) for col in join_cols]) + + matching_source_rows = source_table.filter(match_expr) + + rows_to_update = [] + + for index in range(matching_source_rows.num_rows): + source_row = matching_source_rows.slice(index, 1) + + target_filter = functools.reduce(operator.and_, [pc.field(col) == source_row.column(col)[0].as_py() for col in join_cols]) + + matching_target_row = target_table.filter(target_filter) + + if matching_target_row.num_rows > 0: + needs_update = False + + for non_key_col in non_key_cols: + source_value = source_row.column(non_key_col)[0].as_py() + target_value = matching_target_row.column(non_key_col)[0].as_py() + + if source_value != target_value: + needs_update = True + break + + if needs_update: + rows_to_update.append(source_row) + + if rows_to_update: + rows_to_update_table = pa.concat_tables(rows_to_update) else: - return source_table.schema.empty_table() + rows_to_update_table = pa.Table.from_arrays([], names=source_table.column_names) + + common_columns = set(source_table.column_names).intersection(set(target_table.column_names)) + rows_to_update_table = rows_to_update_table.select(list(common_columns)) + + return rows_to_update_table diff --git a/pyiceberg/transforms.py b/pyiceberg/transforms.py index 3f5a8d8998..b8f0b975e6 100644 --- a/pyiceberg/transforms.py +++ b/pyiceberg/transforms.py @@ -17,9 +17,7 @@ import base64 import datetime as py_datetime -import importlib import struct -import types from abc import ABC, abstractmethod from enum import IntEnum from functools import singledispatch @@ -30,7 +28,6 @@ import mmh3 from pydantic import Field, 
PositiveInt, PrivateAttr -from pyiceberg.exceptions import NotInstalledError from pyiceberg.expressions import ( BoundEqualTo, BoundGreaterThan, @@ -76,9 +73,7 @@ IntegerType, LongType, StringType, - TimestampNanoType, TimestampType, - TimestamptzNanoType, TimestamptzType, TimeType, UUIDType, @@ -109,50 +104,32 @@ TRUNCATE_PARSER = ParseNumberFromBrackets(TRUNCATE) -def _try_import(module_name: str, extras_name: Optional[str] = None) -> types.ModuleType: - try: - return importlib.import_module(module_name) - except ImportError: - if extras_name: - msg = f'{module_name} needs to be installed. pip install "pyiceberg[{extras_name}]"' - else: - msg = f"{module_name} needs to be installed." - raise NotInstalledError(msg) from None - - def _transform_literal(func: Callable[[L], L], lit: Literal[L]) -> Literal[L]: """Small helper to upwrap the value from the literal, and wrap it again.""" return literal(func(lit.value)) -def _pyiceberg_transform_wrapper( - transform_func: Callable[["ArrayLike", Any], "ArrayLike"], - *args: Any, - expected_type: Optional["pa.DataType"] = None, -) -> Callable[["ArrayLike"], "ArrayLike"]: - try: - import pyarrow as pa - except ModuleNotFoundError as e: - raise ModuleNotFoundError("For partition transforms, PyArrow needs to be installed") from e - - def _transform(array: "ArrayLike") -> "ArrayLike": - def _cast_if_needed(arr: "ArrayLike") -> "ArrayLike": - if expected_type is not None: - return arr.cast(expected_type) - else: - return arr - - if isinstance(array, pa.Array): - return _cast_if_needed(transform_func(array, *args)) - elif isinstance(array, pa.ChunkedArray): - result_chunks = [] - for arr in array.iterchunks(): - result_chunks.append(_cast_if_needed(transform_func(arr, *args))) - return pa.chunked_array(result_chunks) +def parse_transform(v: Any) -> Any: + if isinstance(v, str): + if v == IDENTITY: + return IdentityTransform() + elif v == VOID: + return VoidTransform() + elif v.startswith(BUCKET): + return BucketTransform(num_buckets=BUCKET_PARSER.match(v)) + elif v.startswith(TRUNCATE): + return TruncateTransform(width=TRUNCATE_PARSER.match(v)) + elif v == YEAR: + return YearTransform() + elif v == MONTH: + return MonthTransform() + elif v == DAY: + return DayTransform() + elif v == HOUR: + return HourTransform() else: - raise ValueError(f"PyArrow array can only be of type pa.Array or pa.ChunkedArray, but found {type(array)}") - - return _transform + return UnknownTransform(transform=v) + return v class Transform(IcebergRootModel[str], ABC, Generic[S, T]): @@ -219,28 +196,26 @@ def supports_pyarrow_transform(self) -> bool: @abstractmethod def pyarrow_transform(self, source: IcebergType) -> "Callable[[pa.Array], pa.Array]": ... 
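For context on the parse_transform helper restored in the hunk above: it maps the string form of a partition transform, as stored in table metadata, back to a Transform instance. A minimal usage sketch follows; it is not part of the patch and simply assumes the 0.9.x pyiceberg.transforms API shown in this diff.

# Illustrative sketch only (not part of the patch); assumes the 0.9.x API shown above.
from pyiceberg.transforms import BucketTransform, IdentityTransform, UnknownTransform, parse_transform

bucket = parse_transform("bucket[16]")      # string form used in partition specs
assert isinstance(bucket, BucketTransform)
assert bucket.num_buckets == 16

assert isinstance(parse_transform("identity"), IdentityTransform)

# Unrecognised transform names fall back to UnknownTransform instead of raising.
assert isinstance(parse_transform("zorder(a,b)"), UnknownTransform)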
+ def _pyiceberg_transform_wrapper( + self, transform_func: Callable[["ArrayLike", Any], "ArrayLike"], *args: Any + ) -> Callable[["ArrayLike"], "ArrayLike"]: + try: + import pyarrow as pa + except ModuleNotFoundError as e: + raise ModuleNotFoundError("For bucket/truncate transforms, PyArrow needs to be installed") from e + + def _transform(array: "ArrayLike") -> "ArrayLike": + if isinstance(array, pa.Array): + return transform_func(array, *args) + elif isinstance(array, pa.ChunkedArray): + result_chunks = [] + for arr in array.iterchunks(): + result_chunks.append(transform_func(arr, *args)) + return pa.chunked_array(result_chunks) + else: + raise ValueError(f"PyArrow array can only be of type pa.Array or pa.ChunkedArray, but found {type(array)}") -def parse_transform(v: Any) -> Transform[Any, Any]: - if isinstance(v, str): - if v == IDENTITY: - return IdentityTransform() - elif v == VOID: - return VoidTransform() - elif v.startswith(BUCKET): - return BucketTransform(num_buckets=BUCKET_PARSER.match(v)) - elif v.startswith(TRUNCATE): - return TruncateTransform(width=TRUNCATE_PARSER.match(v)) - elif v == YEAR: - return YearTransform() - elif v == MONTH: - return MonthTransform() - elif v == DAY: - return DayTransform() - elif v == HOUR: - return HourTransform() - else: - return UnknownTransform(transform=v) - return v + return _transform class BucketTransform(Transform[S, int]): @@ -257,8 +232,8 @@ class BucketTransform(Transform[S, int]): _num_buckets: PositiveInt = PrivateAttr() def __init__(self, num_buckets: int, **data: Any) -> None: - super().__init__(f"bucket[{num_buckets}]", **data) self._num_buckets = num_buckets + super().__init__(f"bucket[{num_buckets}]", **data) @property def num_buckets(self) -> int: @@ -315,8 +290,6 @@ def can_transform(self, source: IcebergType) -> bool: TimeType, TimestampType, TimestamptzType, - TimestampNanoType, - TimestamptzNanoType, DecimalType, StringType, FixedType, @@ -350,18 +323,6 @@ def hash_func(v: Any) -> int: return mmh3.hash(struct.pack(" int: - # In order to bucket TimestampNano the same as Timestamp - # convert to micros before hashing. - if isinstance(v, py_datetime.datetime): - v = datetime.datetime_to_micros(v) - else: - v = datetime.nanos_to_micros(v) - - return mmh3.hash(struct.pack(" int: @@ -396,8 +357,9 @@ def __repr__(self) -> str: return f"BucketTransform(num_buckets={self._num_buckets})" def pyarrow_transform(self, source: IcebergType) -> "Callable[[pa.Array], pa.Array]": - pyiceberg_core_transform = _try_import("pyiceberg_core", extras_name="pyiceberg-core").transform - return _pyiceberg_transform_wrapper(pyiceberg_core_transform.bucket, self._num_buckets) + from pyiceberg_core import transform as pyiceberg_core_transform + + return self._pyiceberg_transform_wrapper(pyiceberg_core_transform.bucket, self._num_buckets) @property def supports_pyarrow_transform(self) -> bool: @@ -495,20 +457,13 @@ def year_func(v: Any) -> int: return datetime.micros_to_years(v) - elif isinstance(source, (TimestampNanoType, TimestamptzNanoType)): - - def year_func(v: Any) -> int: - # python datetime has no nanoseconds support. 
- # nanosecond datetimes will be expressed as int as a workaround - return datetime.nanos_to_years(v) - else: raise ValueError(f"Cannot apply year transform for type: {source}") return lambda v: year_func(v) if v is not None else None def can_transform(self, source: IcebergType) -> bool: - return isinstance(source, (DateType, TimestampType, TimestamptzType, TimestampNanoType, TimestamptzNanoType)) + return isinstance(source, (DateType, TimestampType, TimestamptzType)) @property def granularity(self) -> TimeResolution: @@ -522,9 +477,19 @@ def __repr__(self) -> str: return "YearTransform()" def pyarrow_transform(self, source: IcebergType) -> "Callable[[pa.Array], pa.Array]": - pa = _try_import("pyarrow") - pyiceberg_core_transform = _try_import("pyiceberg_core", extras_name="pyiceberg-core").transform - return _pyiceberg_transform_wrapper(pyiceberg_core_transform.year, expected_type=pa.int32()) + import pyarrow as pa + import pyarrow.compute as pc + + if isinstance(source, DateType): + epoch = datetime.EPOCH_DATE + elif isinstance(source, TimestampType): + epoch = datetime.EPOCH_TIMESTAMP + elif isinstance(source, TimestamptzType): + epoch = datetime.EPOCH_TIMESTAMPTZ + else: + raise ValueError(f"Cannot apply year transform for type: {source}") + + return lambda v: pc.years_between(pa.scalar(epoch), v) if v is not None else None class MonthTransform(TimeTransform[S]): @@ -555,20 +520,13 @@ def month_func(v: Any) -> int: return datetime.micros_to_months(v) - elif isinstance(source, (TimestampNanoType, TimestamptzNanoType)): - - def month_func(v: Any) -> int: - # python datetime has no nanoseconds support. - # nanosecond datetimes will be expressed as int as a workaround - return datetime.nanos_to_months(v) - else: raise ValueError(f"Cannot apply month transform for type: {source}") return lambda v: month_func(v) if v is not None else None def can_transform(self, source: IcebergType) -> bool: - return isinstance(source, (DateType, TimestampType, TimestamptzType, TimestampNanoType, TimestamptzNanoType)) + return isinstance(source, (DateType, TimestampType, TimestamptzType)) @property def granularity(self) -> TimeResolution: @@ -582,10 +540,25 @@ def __repr__(self) -> str: return "MonthTransform()" def pyarrow_transform(self, source: IcebergType) -> "Callable[[pa.Array], pa.Array]": - pa = _try_import("pyarrow") - pyiceberg_core_transform = _try_import("pyiceberg_core", extras_name="pyiceberg-core").transform + import pyarrow as pa + import pyarrow.compute as pc + + if isinstance(source, DateType): + epoch = datetime.EPOCH_DATE + elif isinstance(source, TimestampType): + epoch = datetime.EPOCH_TIMESTAMP + elif isinstance(source, TimestamptzType): + epoch = datetime.EPOCH_TIMESTAMPTZ + else: + raise ValueError(f"Cannot apply month transform for type: {source}") - return _pyiceberg_transform_wrapper(pyiceberg_core_transform.month, expected_type=pa.int32()) + def month_func(v: pa.Array) -> pa.Array: + return pc.add( + pc.multiply(pc.years_between(pa.scalar(epoch), v), pa.scalar(12)), + pc.add(pc.month(v), pa.scalar(-1)), + ) + + return lambda v: month_func(v) if v is not None else None class DayTransform(TimeTransform[S]): @@ -616,20 +589,13 @@ def day_func(v: Any) -> int: return datetime.micros_to_days(v) - elif isinstance(source, (TimestampNanoType, TimestamptzNanoType)): - - def day_func(v: Any) -> int: - # python datetime has no nanoseconds support. 
- # nanosecond datetimes will be expressed as int as a workaround - return datetime.nanos_to_days(v) - else: raise ValueError(f"Cannot apply day transform for type: {source}") return lambda v: day_func(v) if v is not None else None def can_transform(self, source: IcebergType) -> bool: - return isinstance(source, (DateType, TimestampType, TimestamptzType, TimestampNanoType, TimestamptzNanoType)) + return isinstance(source, (DateType, TimestampType, TimestamptzType)) def result_type(self, source: IcebergType) -> IcebergType: """Return the result type of a day transform. @@ -651,10 +617,19 @@ def __repr__(self) -> str: return "DayTransform()" def pyarrow_transform(self, source: IcebergType) -> "Callable[[pa.Array], pa.Array]": - pa = _try_import("pyarrow", extras_name="pyarrow") - pyiceberg_core_transform = _try_import("pyiceberg_core", extras_name="pyiceberg-core").transform + import pyarrow as pa + import pyarrow.compute as pc + + if isinstance(source, DateType): + epoch = datetime.EPOCH_DATE + elif isinstance(source, TimestampType): + epoch = datetime.EPOCH_TIMESTAMP + elif isinstance(source, TimestamptzType): + epoch = datetime.EPOCH_TIMESTAMPTZ + else: + raise ValueError(f"Cannot apply day transform for type: {source}") - return _pyiceberg_transform_wrapper(pyiceberg_core_transform.day, expected_type=pa.int32()) + return lambda v: pc.days_between(pa.scalar(epoch), v) if v is not None else None class HourTransform(TimeTransform[S]): @@ -677,20 +652,13 @@ def hour_func(v: Any) -> int: return datetime.micros_to_hours(v) - elif isinstance(source, (TimestampNanoType, TimestamptzNanoType)): - - def hour_func(v: Any) -> int: - # python datetime has no nanoseconds support. - # nanosecond datetimes will be expressed as int as a workaround - return datetime.nanos_to_hours(v) - else: raise ValueError(f"Cannot apply hour transform for type: {source}") return lambda v: hour_func(v) if v is not None else None def can_transform(self, source: IcebergType) -> bool: - return isinstance(source, (TimestampType, TimestamptzType, TimestampNanoType, TimestamptzNanoType)) + return isinstance(source, (TimestampType, TimestamptzType)) @property def granularity(self) -> TimeResolution: @@ -704,9 +672,17 @@ def __repr__(self) -> str: return "HourTransform()" def pyarrow_transform(self, source: IcebergType) -> "Callable[[pa.Array], pa.Array]": - pyiceberg_core_transform = _try_import("pyiceberg_core", extras_name="pyiceberg-core").transform + import pyarrow as pa + import pyarrow.compute as pc + + if isinstance(source, TimestampType): + epoch = datetime.EPOCH_TIMESTAMP + elif isinstance(source, TimestamptzType): + epoch = datetime.EPOCH_TIMESTAMPTZ + else: + raise ValueError(f"Cannot apply hour transform for type: {source}") - return _pyiceberg_transform_wrapper(pyiceberg_core_transform.hour) + return lambda v: pc.hours_between(pa.scalar(epoch), v) if v is not None else None def _base64encode(buffer: bytes) -> str: @@ -841,11 +817,10 @@ def strict_project(self, name: str, pred: BoundPredicate[Any]) -> Optional[Unbou if isinstance(pred.term, BoundTransform): return _project_transform_predicate(self, name, pred) - if isinstance(pred, BoundUnaryPredicate): - return pred.as_unbound(Reference(name)) - if isinstance(field_type, (IntegerType, LongType, DecimalType)): - if isinstance(pred, BoundLiteralPredicate): + if isinstance(pred, BoundUnaryPredicate): + return pred.as_unbound(Reference(name)) + elif isinstance(pred, BoundLiteralPredicate): return _truncate_number_strict(name, pred, self.transform(field_type)) elif 
isinstance(pred, BoundNotIn): return _set_apply_transform(name, pred, self.transform(field_type)) @@ -927,9 +902,9 @@ def __repr__(self) -> str: return f"TruncateTransform(width={self._width})" def pyarrow_transform(self, source: IcebergType) -> "Callable[[pa.Array], pa.Array]": - pyiceberg_core_transform = _try_import("pyiceberg_core", extras_name="pyiceberg-core").transform + from pyiceberg_core import transform as pyiceberg_core_transform - return _pyiceberg_transform_wrapper(pyiceberg_core_transform.truncate, self._width) + return self._pyiceberg_transform_wrapper(pyiceberg_core_transform.truncate, self._width) @property def supports_pyarrow_transform(self) -> bool: @@ -1061,11 +1036,11 @@ def _truncate_number( raise ValueError(f"Expected a numeric literal, got: {type(boundary)}") if isinstance(pred, BoundLessThan): - return LessThanOrEqual(Reference(name), _transform_literal(transform, boundary.decrement())) + return LessThanOrEqual(Reference(name), _transform_literal(transform, boundary.decrement())) # type: ignore elif isinstance(pred, BoundLessThanOrEqual): return LessThanOrEqual(Reference(name), _transform_literal(transform, boundary)) elif isinstance(pred, BoundGreaterThan): - return GreaterThanOrEqual(Reference(name), _transform_literal(transform, boundary.increment())) + return GreaterThanOrEqual(Reference(name), _transform_literal(transform, boundary.increment())) # type: ignore elif isinstance(pred, BoundGreaterThanOrEqual): return GreaterThanOrEqual(Reference(name), _transform_literal(transform, boundary)) elif isinstance(pred, BoundEqualTo): @@ -1085,11 +1060,11 @@ def _truncate_number_strict( if isinstance(pred, BoundLessThan): return LessThan(Reference(name), _transform_literal(transform, boundary)) elif isinstance(pred, BoundLessThanOrEqual): - return LessThan(Reference(name), _transform_literal(transform, boundary.increment())) + return LessThan(Reference(name), _transform_literal(transform, boundary.increment())) # type: ignore elif isinstance(pred, BoundGreaterThan): return GreaterThan(Reference(name), _transform_literal(transform, boundary)) elif isinstance(pred, BoundGreaterThanOrEqual): - return GreaterThan(Reference(name), _transform_literal(transform, boundary.decrement())) + return GreaterThan(Reference(name), _transform_literal(transform, boundary.decrement())) # type: ignore elif isinstance(pred, BoundNotEqualTo): return NotEqualTo(Reference(name), _transform_literal(transform, boundary)) elif isinstance(pred, BoundEqualTo): diff --git a/pyiceberg/typedef.py b/pyiceberg/typedef.py index d9ace9d971..07374887a3 100644 --- a/pyiceberg/typedef.py +++ b/pyiceberg/typedef.py @@ -17,15 +17,15 @@ from __future__ import annotations from abc import abstractmethod -from datetime import date, datetime, time +from datetime import date, datetime from decimal import Decimal +from functools import lru_cache from typing import ( TYPE_CHECKING, Any, Callable, Dict, Generic, - List, Literal, Optional, Protocol, @@ -38,7 +38,7 @@ from uuid import UUID from pydantic import BaseModel, ConfigDict, RootModel -from typing_extensions import Self, TypeAlias +from typing_extensions import TypeAlias if TYPE_CHECKING: from pyiceberg.types import StructType @@ -94,7 +94,7 @@ def __missing__(self, key: K) -> V: """A recursive dictionary type for nested structures in PyIceberg.""" # Represents the literal value -L = TypeVar("L", str, bool, int, float, bytes, UUID, Decimal, datetime, date, time, covariant=True) +L = TypeVar("L", str, bool, int, float, bytes, UUID, Decimal, datetime, date, 
covariant=True) @runtime_checkable @@ -171,36 +171,51 @@ class IcebergRootModel(RootModel[T], Generic[T]): model_config = ConfigDict(frozen=True) +@lru_cache +def _get_struct_fields(struct_type: StructType) -> Tuple[str, ...]: + return tuple(field.name for field in struct_type.fields) + + class Record(StructProtocol): - __slots__ = ("_data",) - _data: List[Any] + __slots__ = ("_position_to_field_name",) + _position_to_field_name: Tuple[str, ...] + + def __init__(self, *data: Any, struct: Optional[StructType] = None, **named_data: Any) -> None: + if struct is not None: + self._position_to_field_name = _get_struct_fields(struct) + elif named_data: + # Order of named_data is preserved (PEP 468) so this can be used to generate the position dict + self._position_to_field_name = tuple(named_data.keys()) + else: + self._position_to_field_name = tuple(f"field{idx + 1}" for idx in range(len(data))) - @classmethod - def _bind(cls, struct: StructType, **arguments: Any) -> Self: - return cls(*[arguments[field.name] if field.name in arguments else field.initial_default for field in struct.fields]) + for idx, d in enumerate(data): + self[idx] = d - def __init__(self, *data: Any) -> None: - self._data = list(data) + for field_name, d in named_data.items(): + self.__setattr__(field_name, d) def __setitem__(self, pos: int, value: Any) -> None: """Assign a value to a Record.""" - self._data[pos] = value + self.__setattr__(self._position_to_field_name[pos], value) def __getitem__(self, pos: int) -> Any: """Fetch a value from a Record.""" - return self._data[pos] + return self.__getattribute__(self._position_to_field_name[pos]) def __eq__(self, other: Any) -> bool: """Return the equality of two instances of the Record class.""" - return self._data == other._data if isinstance(other, Record) else False + if not isinstance(other, Record): + return False + return self.__dict__ == other.__dict__ def __repr__(self) -> str: """Return the string representation of the Record class.""" - return f"{self.__class__.__name__}[{', '.join(str(v) for v in self._data)}]" + return f"{self.__class__.__name__}[{', '.join(f'{key}={repr(value)}' for key, value in self.__dict__.items() if not key.startswith('_'))}]" def __len__(self) -> int: """Return the number of fields in the Record class.""" - return len(self._data) + return len(self._position_to_field_name) def __hash__(self) -> int: """Return hash value of the Record class.""" diff --git a/pyiceberg/types.py b/pyiceberg/types.py index 6872663f84..bd0eb7a5e9 100644 --- a/pyiceberg/types.py +++ b/pyiceberg/types.py @@ -35,7 +35,6 @@ import re from functools import cached_property from typing import ( - Annotated, Any, ClassVar, Dict, @@ -45,18 +44,16 @@ ) from pydantic import ( - BeforeValidator, Field, PrivateAttr, SerializeAsAny, - field_validator, model_serializer, model_validator, ) -from pydantic_core.core_schema import ValidationInfo, ValidatorFunctionWrapHandler +from pydantic_core.core_schema import ValidatorFunctionWrapHandler from pyiceberg.exceptions import ValidationError -from pyiceberg.typedef import IcebergBaseModel, IcebergRootModel, L, TableVersion +from pyiceberg.typedef import IcebergBaseModel, IcebergRootModel, L from pyiceberg.utils.parsing import ParseNumberFromBrackets from pyiceberg.utils.singleton import Singleton @@ -143,10 +140,6 @@ def handle_primitive_type(cls, v: Any, handler: ValidatorFunctionWrapHandler) -> return TimestampType() if v == "timestamptz": return TimestamptzType() - if v == "timestamp_ns": - return TimestampNanoType() - if v == 
"timestamptz_ns": - return TimestamptzNanoType() if v == "date": return DateType() if v == "time": @@ -155,15 +148,13 @@ def handle_primitive_type(cls, v: Any, handler: ValidatorFunctionWrapHandler) -> return UUIDType() if v == "binary": return BinaryType() - if v == "unknown": - return UnknownType() if v.startswith("fixed"): return FixedType(_parse_fixed_type(v)) if v.startswith("decimal"): precision, scale = _parse_decimal_type(v) return DecimalType(precision, scale) else: - raise ValueError(f"Type not recognized: {v}") + raise ValueError(f"Unknown type: {v}") if isinstance(v, dict) and cls == IcebergType: complex_type = v.get("type") if complex_type == "list": @@ -184,10 +175,6 @@ def is_primitive(self) -> bool: def is_struct(self) -> bool: return isinstance(self, StructType) - def minimum_format_version(self) -> TableVersion: - """Minimum Iceberg format version after which this type is supported.""" - return 1 - class PrimitiveType(Singleton, IcebergRootModel[str], IcebergType): """Base class for all Iceberg Primitive Types.""" @@ -292,18 +279,6 @@ def __eq__(self, other: Any) -> bool: return self.root == other.root if isinstance(other, DecimalType) else False -def _deserialize_default_value(v: Any, context: ValidationInfo) -> Any: - if v is not None: - from pyiceberg.conversions import from_json - - return from_json(context.data.get("field_type"), v) - else: - return None - - -DefaultValue = Annotated[L, BeforeValidator(_deserialize_default_value)] - - class NestedField(IcebergType): """Represents a field of a struct, a map key, a map value, or a list element. @@ -325,14 +300,6 @@ class NestedField(IcebergType): ... doc="Just a long" ... )) '2: bar: required long (Just a long)' - >>> str(NestedField( - ... field_id=3, - ... name='baz', - ... field_type="string", - ... required=True, - ... doc="A string field" - ... 
)) - '3: baz: required string (A string field)' """ field_id: int = Field(alias="id") @@ -340,24 +307,14 @@ class NestedField(IcebergType): field_type: SerializeAsAny[IcebergType] = Field(alias="type") required: bool = Field(default=False) doc: Optional[str] = Field(default=None, repr=False) - initial_default: Optional[DefaultValue] = Field(alias="initial-default", default=None, repr=True) # type: ignore - write_default: Optional[DefaultValue] = Field(alias="write-default", default=None, repr=True) # type: ignore - - @field_validator("field_type", mode="before") - def convert_field_type(cls, v: Any) -> IcebergType: - """Convert string values into IcebergType instances.""" - if isinstance(v, str): - try: - return IcebergType.handle_primitive_type(v, None) - except ValueError as e: - raise ValueError(f"Unsupported field type: '{v}'") from e - return v + initial_default: Optional[Any] = Field(alias="initial-default", default=None, repr=False) + write_default: Optional[L] = Field(alias="write-default", default=None, repr=False) # type: ignore def __init__( self, field_id: Optional[int] = None, name: Optional[str] = None, - field_type: Optional[IcebergType | str] = None, + field_type: Optional[IcebergType] = None, required: bool = False, doc: Optional[str] = None, initial_default: Optional[Any] = None, @@ -375,47 +332,12 @@ def __init__( data["write-default"] = data["write-default"] if "write-default" in data else write_default super().__init__(**data) - @model_serializer() - def serialize_model(self) -> Dict[str, Any]: - from pyiceberg.conversions import to_json - - fields = { - "id": self.field_id, - "name": self.name, - "type": self.field_type, - "required": self.required, - } - - if self.doc is not None: - fields["doc"] = self.doc - if self.initial_default is not None: - fields["initial-default"] = to_json(self.field_type, self.initial_default) - if self.write_default is not None: - fields["write-default"] = to_json(self.field_type, self.write_default) - - return fields - def __str__(self) -> str: """Return the string representation of the NestedField class.""" doc = "" if not self.doc else f" ({self.doc})" req = "required" if self.required else "optional" return f"{self.field_id}: {self.name}: {req} {self.field_type}{doc}" - def __repr__(self) -> str: - """Return the string representation of the NestedField class.""" - parts = [ - f"field_id={self.field_id}", - f"name={self.name!r}", - f"field_type={self.field_type!r}", - f"required={self.required}", - ] - if self.initial_default is not None: - parts.append(f"initial_default={self.initial_default!r}") - if self.write_default is not None: - parts.append(f"write_default={self.write_default!r}") - - return f"NestedField({', '.join(parts)})" - def __getnewargs__(self) -> Tuple[int, str, IcebergType, bool, Optional[str]]: """Pickle the NestedField class.""" return (self.field_id, self.name, self.field_type, self.required, self.doc) @@ -779,44 +701,6 @@ class TimestamptzType(PrimitiveType): root: Literal["timestamptz"] = Field(default="timestamptz") -class TimestampNanoType(PrimitiveType): - """A TimestampNano data type in Iceberg can be represented using an instance of this class. - - TimestampNanos in Iceberg have nanosecond precision and include a date and a time of day without a timezone. 
- - Example: - >>> column_foo = TimestampNanoType() - >>> isinstance(column_foo, TimestampNanoType) - True - >>> column_foo - TimestampNanoType() - """ - - root: Literal["timestamp_ns"] = Field(default="timestamp_ns") - - def minimum_format_version(self) -> TableVersion: - return 3 - - -class TimestamptzNanoType(PrimitiveType): - """A TimestamptzNano data type in Iceberg can be represented using an instance of this class. - - TimestamptzNanos in Iceberg are stored as UTC and include a date and a time of day with a timezone. - - Example: - >>> column_foo = TimestamptzNanoType() - >>> isinstance(column_foo, TimestamptzNanoType) - True - >>> column_foo - TimestamptzNanoType() - """ - - root: Literal["timestamptz_ns"] = Field(default="timestamptz_ns") - - def minimum_format_version(self) -> TableVersion: - return 3 - - class StringType(PrimitiveType): """A String data type in Iceberg can be represented using an instance of this class. @@ -863,22 +747,3 @@ class BinaryType(PrimitiveType): """ root: Literal["binary"] = Field(default="binary") - - -class UnknownType(PrimitiveType): - """An unknown data type in Iceberg can be represented using an instance of this class. - - Unknowns in Iceberg are used to represent data types that are not known at the time of writing. - - Example: - >>> column_foo = UnknownType() - >>> isinstance(column_foo, UnknownType) - True - >>> column_foo - UnknownType() - """ - - root: Literal["unknown"] = Field(default="unknown") - - def minimum_format_version(self) -> TableVersion: - return 3 diff --git a/pyiceberg/utils/concurrent.py b/pyiceberg/utils/concurrent.py index 751cbd9bbb..805599bf41 100644 --- a/pyiceberg/utils/concurrent.py +++ b/pyiceberg/utils/concurrent.py @@ -25,11 +25,6 @@ class ExecutorFactory: _instance: Optional[Executor] = None - @staticmethod - def max_workers() -> Optional[int]: - """Return the max number of workers configured.""" - return Config().get_int("max-workers") - @staticmethod def get_or_create() -> Executor: """Return the same executor in each call.""" @@ -38,3 +33,8 @@ def get_or_create() -> Executor: ExecutorFactory._instance = ThreadPoolExecutor(max_workers=max_workers) return ExecutorFactory._instance + + @staticmethod + def max_workers() -> Optional[int]: + """Return the max number of workers configured.""" + return Config().get_int("max-workers") diff --git a/pyiceberg/utils/config.py b/pyiceberg/utils/config.py index 78f121a402..0c162777d6 100644 --- a/pyiceberg/utils/config.py +++ b/pyiceberg/utils/config.py @@ -154,17 +154,10 @@ def get_catalog_config(self, catalog_name: str) -> Optional[RecursiveDict]: raise ValueError(f"Catalog configurations needs to be an object: {catalog_name}") if catalog_name_lower in catalogs: catalog_conf = catalogs[catalog_name_lower] - if not isinstance(catalog_conf, dict): - raise ValueError(f"Configuration path catalogs.{catalog_name_lower} needs to be an object") + assert isinstance(catalog_conf, dict), f"Configuration path catalogs.{catalog_name_lower} needs to be an object" return catalog_conf return None - def get_known_catalogs(self) -> List[str]: - catalogs = self.config.get(CATALOG, {}) - if not isinstance(catalogs, dict): - raise ValueError("Catalog configurations needs to be an object") - return list(catalogs.keys()) - def get_int(self, key: str) -> Optional[int]: if (val := self.config.get(key)) is not None: try: diff --git a/pyiceberg/utils/datetime.py b/pyiceberg/utils/datetime.py index 46bbb32dcd..0cb6926efa 100644 --- a/pyiceberg/utils/datetime.py +++ b/pyiceberg/utils/datetime.py @@ 
-29,10 +29,8 @@ EPOCH_DATE = date.fromisoformat("1970-01-01") EPOCH_TIMESTAMP = datetime.fromisoformat("1970-01-01T00:00:00.000000") ISO_TIMESTAMP = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(.\d{1,6})?") -ISO_TIMESTAMP_NANO = re.compile(r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(.\d{1,6})?(\d{1,3})?") EPOCH_TIMESTAMPTZ = datetime.fromisoformat("1970-01-01T00:00:00.000000+00:00") ISO_TIMESTAMPTZ = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(.\d{1,6})?[-+]\d{2}:\d{2}") -ISO_TIMESTAMPTZ_NANO = re.compile(r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(.\d{1,6})?(\d{1,3})?([-+]\d{2}:\d{2})") def micros_to_days(timestamp: int) -> int: @@ -93,59 +91,6 @@ def timestamp_to_micros(timestamp_str: str) -> int: raise ValueError(f"Invalid timestamp without zone: {timestamp_str} (must be ISO-8601)") -def time_str_to_nanos(time_str: str) -> int: - """Convert an ISO-8601 formatted time to nanoseconds from midnight.""" - return time_to_nanos(time.fromisoformat(time_str)) - - -def time_to_nanos(t: time) -> int: - """Convert a datetime.time object to nanoseconds from midnight.""" - # python datetime and time doesn't have nanoseconds support yet - # https://github.com/python/cpython/issues/59648 - return ((((t.hour * 60 + t.minute) * 60) + t.second) * 1_000_000 + t.microsecond) * 1_000 - - -def datetime_to_nanos(dt: datetime) -> int: - """Convert a datetime to nanoseconds from 1970-01-01T00:00:00.000000000.""" - # python datetime and time doesn't have nanoseconds support yet - # https://github.com/python/cpython/issues/59648 - if dt.tzinfo: - delta = dt - EPOCH_TIMESTAMPTZ - else: - delta = dt - EPOCH_TIMESTAMP - return ((delta.days * 86400 + delta.seconds) * 1_000_000 + delta.microseconds) * 1_000 - - -def timestamp_to_nanos(timestamp_str: str) -> int: - """Convert an ISO-9601 formatted timestamp without zone to nanoseconds from 1970-01-01T00:00:00.000000000.""" - if match := ISO_TIMESTAMP_NANO.fullmatch(timestamp_str): - # Python datetime does not have native nanoseconds support - # Hence we need to extract nanoseconds timestamp manually - ns_str = match.group(3) or "0" - ms_str = match.group(2) if match.group(2) else "" - timestamp_str_without_ns_str = match.group(1) + ms_str - return datetime_to_nanos(datetime.fromisoformat(timestamp_str_without_ns_str)) + int(ns_str) - if ISO_TIMESTAMPTZ_NANO.fullmatch(timestamp_str): - # When we can match a timestamp without a zone, we can give a more specific error - raise ValueError(f"Zone offset provided, but not expected: {timestamp_str}") - raise ValueError(f"Invalid timestamp without zone: {timestamp_str} (must be ISO-8601)") - - -def timestamptz_to_nanos(timestamptz_str: str) -> int: - """Convert an ISO-8601 formatted timestamp with zone to nanoseconds from 1970-01-01T00:00:00.000000000+00:00.""" - if match := ISO_TIMESTAMPTZ_NANO.fullmatch(timestamptz_str): - # Python datetime does not have native nanoseconds support - # Hence we need to extract nanoseconds timestamp manually - ns_str = match.group(3) or "0" - ms_str = match.group(2) if match.group(2) else "" - timestamptz_str_without_ns_str = match.group(1) + ms_str + match.group(4) - return datetime_to_nanos(datetime.fromisoformat(timestamptz_str_without_ns_str)) + int(ns_str) - if ISO_TIMESTAMPTZ_NANO.fullmatch(timestamptz_str): - # When we can match a timestamp without a zone, we can give a more specific error - raise ValueError(f"Missing zone offset: {timestamptz_str} (must be ISO-8601)") - raise ValueError(f"Invalid timestamp with zone: {timestamptz_str} (must be ISO-8601)") - - def datetime_to_millis(dt: 
datetime) -> int: """Convert a datetime to milliseconds from 1970-01-01T00:00:00.000000.""" if dt.tzinfo: @@ -239,43 +184,3 @@ def days_to_years(days: int) -> int: def micros_to_years(micros: int) -> int: return micros_to_timestamp(micros).year - EPOCH_TIMESTAMP.year - - -def nanos_to_timestamp(nanos: int) -> datetime: - """Convert nanoseconds from epoch to a microsecond timestamp.""" - dt = timedelta(microseconds=nanos_to_micros(nanos)) - return EPOCH_TIMESTAMP + dt - - -def nanos_to_years(nanos: int) -> int: - return nanos_to_timestamp(nanos).year - EPOCH_TIMESTAMP.year - - -def nanos_to_months(nanos: int) -> int: - dt = nanos_to_timestamp(nanos) - return (dt.year - EPOCH_TIMESTAMP.year) * 12 + (dt.month - EPOCH_TIMESTAMP.month) - - -def nanos_to_days(nanos: int) -> int: - """Convert a timestamp in nanoseconds to a date in days.""" - return timedelta(microseconds=nanos // 1000).days - - -def nanos_to_time(nanos: int) -> time: - """Convert a timestamp in nanoseconds to a microsecond precision time.""" - micros = nanos_to_micros(nanos) - micros, microseconds = divmod(micros, 1000000) - micros, seconds = divmod(micros, 60) - micros, minutes = divmod(micros, 60) - hours = micros - return time(hour=hours, minute=minutes, second=seconds, microsecond=microseconds) - - -def nanos_to_hours(nanos: int) -> int: - """Convert a timestamp in nanoseconds to hours from 1970-01-01T00:00.""" - return nanos // 3_600_000_000_0000 - - -def nanos_to_micros(nanos: int) -> int: - """Convert a nanoseconds timestamp to microsecond timestamp by dropping precision.""" - return nanos // 1000 diff --git a/pyiceberg/utils/schema_conversion.py b/pyiceberg/utils/schema_conversion.py index 551fa40156..8a303b7fb5 100644 --- a/pyiceberg/utils/schema_conversion.py +++ b/pyiceberg/utils/schema_conversion.py @@ -26,14 +26,7 @@ Union, ) -from pyiceberg.schema import ( - FIELD_ID_PROP, - ICEBERG_FIELD_NAME_PROP, - Schema, - SchemaVisitorPerPrimitiveType, - make_compatible_name, - visit, -) +from pyiceberg.schema import Schema, SchemaVisitorPerPrimitiveType, visit from pyiceberg.types import ( BinaryType, BooleanType, @@ -54,7 +47,6 @@ TimestampType, TimestamptzType, TimeType, - UnknownType, UUIDType, ) from pyiceberg.utils.decimal import decimal_required_bytes @@ -70,7 +62,6 @@ "long": LongType(), "string": StringType(), "enum": StringType(), - "null": UnknownType(), } LOGICAL_FIELD_TYPE_MAPPING: Dict[Tuple[str, str], PrimitiveType] = { @@ -78,7 +69,6 @@ ("time-micros", "long"): TimeType(), ("timestamp-micros", "long"): TimestampType(), ("uuid", "fixed"): UUIDType(), - ("uuid", "string"): UUIDType(), } AvroType = Union[str, Any] @@ -219,9 +209,9 @@ def _convert_schema(self, avro_type: Union[str, Dict[str, Any]]) -> IcebergType: elif isinstance(type_identifier, str) and type_identifier in PRIMITIVE_FIELD_TYPE_MAPPING: return PRIMITIVE_FIELD_TYPE_MAPPING[type_identifier] else: - raise TypeError(f"Type not recognized: {avro_type}") + raise TypeError(f"Unknown type: {avro_type}") else: - raise TypeError(f"Type not recognized: {avro_type}") + raise TypeError(f"Unknown type: {avro_type}") def _convert_field(self, field: Dict[str, Any]) -> NestedField: """Convert an Avro field into an Iceberg equivalent field. @@ -232,13 +222,13 @@ def _convert_field(self, field: Dict[str, Any]) -> NestedField: Returns: The Iceberg equivalent field. 
""" - if FIELD_ID_PROP not in field: - raise ValueError(f"Cannot convert field, missing {FIELD_ID_PROP}: {field}") + if "field-id" not in field: + raise ValueError(f"Cannot convert field, missing field-id: {field}") plain_type, required = self._resolve_union(field["type"]) return NestedField( - field_id=field[FIELD_ID_PROP], + field_id=field["field-id"], name=field["name"], field_type=self._convert_schema(plain_type), required=required, @@ -531,20 +521,14 @@ def field(self, field: NestedField, field_result: AvroType) -> AvroType: if isinstance(field_result, dict) and field_result.get("type") == "record": field_result["name"] = f"r{field.field_id}" - original_name = field.name - sanitized_name = make_compatible_name(original_name) - result = { - "name": sanitized_name, - FIELD_ID_PROP: field.field_id, + "name": field.name, + "field-id": field.field_id, "type": field_result if field.required else ["null", field_result], } - if original_name != sanitized_name: - result[ICEBERG_FIELD_NAME_PROP] = original_name - if field.write_default is not None: - result["default"] = field.write_default + result["default"] = field.write_default # type: ignore elif field.optional: result["default"] = None @@ -577,8 +561,8 @@ def map(self, map_type: MapType, key_result: AvroType, value_result: AvroType) - "type": "record", "name": f"k{self.last_map_key_field_id}_v{self.last_map_value_field_id}", "fields": [ - {"name": "key", "type": key_result, FIELD_ID_PROP: self.last_map_key_field_id}, - {"name": "value", "type": value_result, FIELD_ID_PROP: self.last_map_value_field_id}, + {"name": "key", "type": key_result, "field-id": self.last_map_key_field_id}, + {"name": "value", "type": value_result, "field-id": self.last_map_value_field_id}, ], }, "logicalType": "map", @@ -619,17 +603,13 @@ def visit_time(self, time_type: TimeType) -> AvroType: return {"type": "long", "logicalType": "time-micros"} def visit_timestamp(self, timestamp_type: TimestampType) -> AvroType: + # Iceberg only supports micro's return {"type": "long", "logicalType": "timestamp-micros", "adjust-to-utc": False} - def visit_timestamp_ns(self, timestamp_type: TimestampType) -> AvroType: - return {"type": "long", "logicalType": "timestamp-nanos", "adjust-to-utc": False} - def visit_timestamptz(self, timestamptz_type: TimestamptzType) -> AvroType: + # Iceberg only supports micro's return {"type": "long", "logicalType": "timestamp-micros", "adjust-to-utc": True} - def visit_timestamptz_ns(self, timestamptz_type: TimestamptzType) -> AvroType: - return {"type": "long", "logicalType": "timestamp-nanos", "adjust-to-utc": True} - def visit_string(self, string_type: StringType) -> AvroType: return "string" @@ -638,6 +618,3 @@ def visit_uuid(self, uuid_type: UUIDType) -> AvroType: def visit_binary(self, binary_type: BinaryType) -> AvroType: return "bytes" - - def visit_unknown(self, unknown_type: UnknownType) -> AvroType: - return "null" diff --git a/pyproject.toml b/pyproject.toml index e642f2888b..f8c27a04b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ # under the License. [tool.poetry] name = "pyiceberg" -version = "0.10.0" +version = "0.9.0" readme = "README.md" homepage = "https://py.iceberg.apache.org/" repository = "https://github.com/apache/iceberg-python" @@ -53,7 +53,7 @@ python = "^3.9.2, !=3.9.7" mmh3 = ">=4.0.0,<6.0.0" requests = ">=2.20.0,<3.0.0" click = ">=7.1.1,<9.0.0" -rich = ">=10.11.0,<15.0.0" +rich = ">=10.11.0,<14.0.0" strictyaml = ">=1.7.0,<2.0.0" # CVE-2020-14343 was fixed in 5.4. 
pydantic = ">=2.0,<3.0,!=2.4.0,!=2.4.1" # 2.4.0, 2.4.1 has a critical bug sortedcontainers = "2.4.0" @@ -61,64 +61,58 @@ fsspec = ">=2023.1.0" pyparsing = ">=3.1.0,<4.0.0" zstandard = ">=0.13.0,<1.0.0" tenacity = ">=8.2.3,<10.0.0" -pyroaring = ">=1.0.0,<2.0.0" -pyarrow = { version = ">=17.0.0", optional = true } -google-auth = { version = ">=2.4.0", optional = true } +pyarrow = { version = ">=17.0.0,<20.0.0", optional = true } pandas = { version = ">=1.0.0,<3.0.0", optional = true } duckdb = { version = ">=0.5.0,<2.0.0", optional = true } ray = [ { version = "==2.10.0", python = "<3.9", optional = true }, - { version = ">=2.10.0,<=2.44.0", python = ">=3.9", optional = true }, + { version = ">=2.10.0,<3.0.0", python = ">=3.9", optional = true }, ] python-snappy = { version = ">=0.6.0,<1.0.0", optional = true } thrift = { version = ">=0.13.0,<1.0.0", optional = true } +mypy-boto3-glue = { version = ">=1.28.18", optional = true } boto3 = { version = ">=1.24.59", optional = true } s3fs = { version = ">=2023.1.0", optional = true } adlfs = { version = ">=2023.1.0", optional = true } gcsfs = { version = ">=2023.1.0", optional = true } -huggingface-hub = { version = ">=0.24.0", optional = true } psycopg2-binary = { version = ">=2.9.6", optional = true } sqlalchemy = { version = "^2.0.18", optional = true } -bodo = { version = ">=2025.7.4", optional = true } -daft = { version = ">=0.5.0", optional = true } -cachetools = ">=5.5,<7.0" -pyiceberg-core = { version = ">=0.5.1,<0.7.0", optional = true } +getdaft = { version = ">=0.2.12", optional = true } +cachetools = "^5.5.0" +pyiceberg-core = { version = "^0.4.0", optional = true } polars = { version = "^1.21.0", optional = true } thrift-sasl = { version = ">=0.4.3", optional = true } -kerberos = {version = "^1.3.1", optional = true} -datafusion = { version = ">=45", optional = true } [tool.poetry.group.dev.dependencies] pytest = "7.4.4" pytest-checkdocs = "2.13.0" pytest-lazy-fixture = "0.6.3" -pre-commit = "4.3.0" -fastavro = "1.12.0" +pre-commit = "4.1.0" +fastavro = "1.10.0" coverage = { version = "^7.4.2", extras = ["toml"] } requests-mock = "1.12.1" moto = { version = "^5.0.2", extras = ["server"] } -typing-extensions = "4.14.1" -pytest-mock = "3.14.1" -pyspark = "3.5.6" -cython = "3.1.2" +typing-extensions = "4.12.2" +pytest-mock = "3.14.0" +pyspark = "3.5.3" +cython = "3.0.12" deptry = ">=0.14,<0.24" +datafusion = "^44.0.0" docutils = "!=0.21.post1" # https://github.com/python-poetry/poetry/issues/9248#issuecomment-2026240520 -mypy-boto3-glue = ">=1.28.18" -mypy-boto3-dynamodb = ">=1.28.18" [tool.poetry.group.docs.dependencies] # for mkdocs mkdocs = "1.6.1" -griffe = "1.11.1" -jinja2 = "3.1.6" -mkdocstrings = "0.30.0" -mkdocstrings-python = "1.16.12" -mkdocs-literate-nav = "0.6.2" -mkdocs-autorefs = "1.4.2" +griffe = "1.5.7" +jinja2 = "3.1.5" +mkdocstrings = "0.28.1" +mkdocstrings-python = "1.16.1" +mkdocs-literate-nav = "0.6.1" +mkdocs-autorefs = "1.3.1" mkdocs-gen-files = "0.5.0" -mkdocs-material = "9.6.16" +mkdocs-material = "9.6.4" mkdocs-material-extensions = "1.3.1" -mkdocs-section-index = "0.3.10" +mkdocs-section-index = "0.3.9" [[tool.mypy.overrides]] module = "pytest_mock.*" @@ -128,10 +122,6 @@ ignore_missing_imports = true module = "pyarrow.*" ignore_missing_imports = true -[[tool.mypy.overrides]] -module = "google.*" -ignore_missing_imports = true - [[tool.mypy.overrides]] module = "pandas.*" ignore_missing_imports = true @@ -224,10 +214,6 @@ ignore_missing_imports = true module = "mypy_boto3_glue.*" ignore_missing_imports = true 
-[[tool.mypy.overrides]] -module = "mypy_boto3_dynamodb.*" -ignore_missing_imports = true - [[tool.mypy.overrides]] module = "moto" ignore_missing_imports = true @@ -300,18 +286,17 @@ generate-setup-file = false script = "build-module.py" [tool.poetry.extras] -pyarrow = ["pyarrow", "pyiceberg-core"] +pyarrow = ["pyarrow"] pandas = ["pandas", "pyarrow"] duckdb = ["duckdb", "pyarrow"] ray = ["ray", "pyarrow", "pandas"] -bodo = ["bodo"] -daft = ["daft"] +daft = ["getdaft"] polars = ["polars"] snappy = ["python-snappy"] hive = ["thrift"] -hive-kerberos = ["thrift", "thrift_sasl", "kerberos"] +hive-kerberos = ["thrift", "thrift_sasl"] s3fs = ["s3fs"] -glue = ["boto3"] +glue = ["boto3", "mypy-boto3-glue"] adlfs = ["adlfs"] dynamodb = ["boto3"] zstandard = ["zstandard"] @@ -319,14 +304,9 @@ sql-postgres = ["sqlalchemy", "psycopg2-binary"] sql-sqlite = ["sqlalchemy"] gcsfs = ["gcsfs"] rest-sigv4 = ["boto3"] -hf = ["huggingface-hub"] pyiceberg-core = ["pyiceberg-core"] -datafusion = ["datafusion"] -gcp-auth = ["google-auth"] [tool.pytest.ini_options] -testpaths = ["tests"] - markers = [ "unmarked: marks a test as a unittest", "s3: marks a test as requiring access to s3 compliant storage (use with --aws-access-key-id, --aws-secret-access-key, and --endpoint args)", @@ -445,10 +425,6 @@ ignore_missing_imports = true module = "gcsfs.*" ignore_missing_imports = true -[[tool.mypy.overrides]] -module = "huggingface_hub.*" -ignore_missing_imports = true - [[tool.mypy.overrides]] module = "packaging.*" ignore_missing_imports = true @@ -493,10 +469,6 @@ ignore_missing_imports = true module = "daft.*" ignore_missing_imports = true -[[tool.mypy.overrides]] -module = "bodo.*" -ignore_missing_imports = true - [[tool.mypy.overrides]] module = "pyparsing.*" ignore_missing_imports = true @@ -537,9 +509,5 @@ ignore_missing_imports = true module = "datafusion.*" ignore_missing_imports = true -[[tool.mypy.overrides]] -module = "pyroaring.*" -ignore_missing_imports = true - [tool.coverage.run] source = ['pyiceberg/'] diff --git a/tests/avro/test_file.py b/tests/avro/test_file.py index 137215ebc8..0756b2670c 100644 --- a/tests/avro/test_file.py +++ b/tests/avro/test_file.py @@ -16,7 +16,8 @@ # under the License. 
import inspect from _decimal import Decimal -from datetime import datetime +from copy import copy +from datetime import date, datetime, time from enum import Enum from tempfile import TemporaryDirectory from typing import Any @@ -27,7 +28,7 @@ import pyiceberg.avro.file as avro from pyiceberg.avro.codecs.deflate import DeflateCodec -from pyiceberg.avro.file import AvroFileHeader +from pyiceberg.avro.file import META_SCHEMA, AvroFileHeader from pyiceberg.io.pyarrow import PyArrowFileIO from pyiceberg.manifest import ( DEFAULT_BLOCK_SIZE, @@ -39,7 +40,7 @@ ManifestEntryStatus, ) from pyiceberg.schema import Schema -from pyiceberg.typedef import Record, TableVersion +from pyiceberg.typedef import Record from pyiceberg.types import ( BooleanType, DateType, @@ -60,17 +61,26 @@ def get_deflate_compressor() -> None: - header = AvroFileHeader(bytes(0), {"avro.codec": "deflate"}, bytes(16)) + header = AvroFileHeader(struct=META_SCHEMA) + header[0] = bytes(0) + header[1] = {"avro.codec": "deflate"} + header[2] = bytes(16) assert header.compression_codec() == DeflateCodec def get_null_compressor() -> None: - header = AvroFileHeader(bytes(0), {"avro.codec": "null"}, bytes(16)) + header = AvroFileHeader(struct=META_SCHEMA) + header[0] = bytes(0) + header[1] = {"avro.codec": "null"} + header[2] = bytes(16) assert header.compression_codec() is None def test_unknown_codec() -> None: - header = AvroFileHeader(bytes(0), {"avro.codec": "unknown"}, bytes(16)) + header = AvroFileHeader(struct=META_SCHEMA) + header[0] = bytes(0) + header[1] = {"avro.codec": "unknown"} + header[2] = bytes(16) with pytest.raises(ValueError) as exc_info: header.compression_codec() @@ -79,7 +89,10 @@ def test_unknown_codec() -> None: def test_missing_schema() -> None: - header = AvroFileHeader(bytes(0), {}, bytes(16)) + header = AvroFileHeader(struct=META_SCHEMA) + header[0] = bytes(0) + header[1] = {} + header[2] = bytes(16) with pytest.raises(ValueError) as exc_info: header.get_schema() @@ -106,7 +119,7 @@ def todict(obj: Any) -> Any: def test_write_manifest_entry_with_iceberg_read_with_fastavro_v1() -> None: - data_file = DataFile.from_args( + data_file = DataFile( content=DataFileContent.DATA, file_path="s3://some-path/some-file.parquet", file_format=FileFormat.PARQUET, @@ -124,7 +137,7 @@ def test_write_manifest_entry_with_iceberg_read_with_fastavro_v1() -> None: equality_ids=[], sort_order_id=4, ) - entry = ManifestEntry.from_args( + entry = ManifestEntry( status=ManifestEntryStatus.ADDED, snapshot_id=8638475580105682862, sequence_number=0, @@ -172,7 +185,7 @@ def test_write_manifest_entry_with_iceberg_read_with_fastavro_v1() -> None: def test_write_manifest_entry_with_iceberg_read_with_fastavro_v2() -> None: - data_file = DataFile.from_args( + data_file = DataFile( content=DataFileContent.DATA, file_path="s3://some-path/some-file.parquet", file_format=FileFormat.PARQUET, @@ -190,7 +203,7 @@ def test_write_manifest_entry_with_iceberg_read_with_fastavro_v2() -> None: equality_ids=[], sort_order_id=4, ) - entry = ManifestEntry.from_args( + entry = ManifestEntry( status=ManifestEntryStatus.ADDED, snapshot_id=8638475580105682862, sequence_number=0, @@ -226,32 +239,33 @@ def test_write_manifest_entry_with_iceberg_read_with_fastavro_v2() -> None: @pytest.mark.parametrize("format_version", [1, 2]) -def test_write_manifest_entry_with_fastavro_read_with_iceberg(format_version: TableVersion) -> None: - data_file_dict = { - "content": DataFileContent.DATA, - "file_path": "s3://some-path/some-file.parquet", - "file_format": 
FileFormat.PARQUET, - "partition": Record(), - "record_count": 131327, - "file_size_in_bytes": 220669226, - "column_sizes": {1: 220661854}, - "value_counts": {1: 131327}, - "null_value_counts": {1: 0}, - "nan_value_counts": {}, - "lower_bounds": {1: b"aaaaaaaaaaaaaaaa"}, - "upper_bounds": {1: b"zzzzzzzzzzzzzzzz"}, - "key_metadata": b"\xde\xad\xbe\xef", - "split_offsets": [4, 133697593], - "equality_ids": [], - "sort_order_id": 4, - "spec_id": 3, - } - data_file_v2 = DataFile.from_args(**data_file_dict) # type: ignore - - entry = ManifestEntry.from_args( +def test_write_manifest_entry_with_fastavro_read_with_iceberg(format_version: int) -> None: + data_file = DataFile( + content=DataFileContent.DATA, + file_path="s3://some-path/some-file.parquet", + file_format=FileFormat.PARQUET, + partition=Record(), + record_count=131327, + file_size_in_bytes=220669226, + column_sizes={1: 220661854}, + value_counts={1: 131327}, + null_value_counts={1: 0}, + nan_value_counts={}, + lower_bounds={1: b"aaaaaaaaaaaaaaaa"}, + upper_bounds={1: b"zzzzzzzzzzzzzzzz"}, + key_metadata=b"\xde\xad\xbe\xef", + split_offsets=[4, 133697593], + equality_ids=[], + sort_order_id=4, + spec_id=3, + ) + + entry = ManifestEntry( status=ManifestEntryStatus.ADDED, snapshot_id=8638475580105682862, - data_file=data_file_v2, + sequence_number=0, + file_sequence_number=0, + data_file=data_file, ) with TemporaryDirectory() as tmpdir: @@ -283,13 +297,17 @@ def test_write_manifest_entry_with_fastavro_read_with_iceberg(format_version: Ta avro_entry = next(it) if format_version == 1: - data_file_v1 = DataFile.from_args(**data_file_dict, _table_format_version=format_version) + v1_datafile = copy(data_file) + # Not part of V1 + v1_datafile.equality_ids = None - assert avro_entry == ManifestEntry.from_args( - status=1, + assert avro_entry == ManifestEntry( + status=ManifestEntryStatus.ADDED, snapshot_id=8638475580105682862, - data_file=data_file_v1, - _table_format_version=format_version, + # Not part of v1 + sequence_number=None, + file_sequence_number=None, + data_file=v1_datafile, ) elif format_version == 2: assert entry == avro_entry @@ -317,57 +335,22 @@ def test_all_primitive_types(is_required: bool) -> None: ) class AllPrimitivesRecord(Record): - @property - def field_fixed(self) -> bytes: - return self._data[0] - - @property - def field_decimal(self) -> Decimal: - return self._data[1] - - @property - def field_bool(self) -> bool: - return self._data[2] - - @property - def field_int(self) -> int: - return self._data[3] - - @property - def field_long(self) -> int: - return self._data[4] - - @property - def field_float(self) -> float: - return self._data[5] - - @property - def field_double(self) -> float: - return self._data[6] - - @property - def field_date(self) -> datetime: - return self._data[7] - - @property - def field_time(self) -> datetime: - return self._data[8] - - @property - def field_timestamp(self) -> datetime: - return self._data[9] - - @property - def field_timestamptz(self) -> datetime: - return self._data[10] - - @property - def field_string(self) -> str: - return self._data[11] - - @property - def field_uuid(self) -> UUID: - return self._data[12] + field_fixed: bytes + field_decimal: Decimal + field_bool: bool + field_int: int + field_long: int + field_float: float + field_double: float + field_date: date + field_time: time + field_timestamp: datetime + field_timestamptz: datetime + field_string: str + field_uuid: UUID + + def __init__(self, *data: Any, **named_data: Any) -> None: + super().__init__(*data, 
**{"struct": all_primitives_schema.as_struct(), **named_data}) record = AllPrimitivesRecord( b"\x124Vx\x124Vx\x124Vx\x124Vx", diff --git a/tests/avro/test_reader.py b/tests/avro/test_reader.py index 82473d11d1..c97d421d87 100644 --- a/tests/avro/test_reader.py +++ b/tests/avro/test_reader.py @@ -35,11 +35,8 @@ StringReader, StructReader, TimeReader, - TimestampNanoReader, TimestampReader, - TimestamptzNanoReader, TimestamptzReader, - UnknownReader, UUIDReader, ) from pyiceberg.avro.resolver import construct_reader @@ -58,14 +55,12 @@ IntegerType, LongType, NestedField, + PrimitiveType, StringType, StructType, - TimestampNanoType, TimestampType, - TimestamptzNanoType, TimestamptzType, TimeType, - UnknownType, UUIDType, ) @@ -317,18 +312,10 @@ def test_timestamp_reader() -> None: assert construct_reader(TimestampType()) == TimestampReader() -def test_timestamp_ns_reader() -> None: - assert construct_reader(TimestampNanoType()) == TimestampNanoReader() - - def test_timestamptz_reader() -> None: assert construct_reader(TimestamptzType()) == TimestamptzReader() -def test_timestamptz_ns_reader() -> None: - assert construct_reader(TimestamptzNanoType()) == TimestamptzNanoReader() - - def test_string_reader() -> None: assert construct_reader(StringType()) == StringReader() @@ -338,7 +325,13 @@ def test_binary_reader() -> None: def test_unknown_type() -> None: - assert construct_reader(UnknownType()) == UnknownReader() + class UnknownType(PrimitiveType): + root: str = "UnknownType" + + with pytest.raises(ValueError) as exc_info: + construct_reader(UnknownType()) + + assert "Unknown type:" in str(exc_info.value) def test_uuid_reader() -> None: @@ -350,7 +343,7 @@ def test_read_struct(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: decoder = decoder_class(b"\x18") struct = StructType(NestedField(1, "id", IntegerType(), required=True)) result = StructReader(((0, IntegerReader()),), Record, struct).read(decoder) - assert repr(result) == "Record[12]" + assert repr(result) == "Record[id=12]" @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) @@ -361,10 +354,10 @@ def test_read_struct_lambda(decoder_class: Callable[[bytes], BinaryDecoder]) -> # You can also pass in an arbitrary function that returns a struct result = StructReader( ((0, IntegerReader()),), - Record, + lambda struct: Record(struct=struct), struct, # pylint: disable=unnecessary-lambda ).read(decoder) - assert repr(result) == "Record[12]" + assert repr(result) == "Record[id=12]" @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) @@ -376,3 +369,19 @@ def test_read_not_struct_type(decoder_class: Callable[[bytes], BinaryDecoder]) - _ = StructReader(((0, IntegerReader()),), str, struct).read(decoder) # type: ignore assert "Incompatible with StructProtocol: " in str(exc_info.value) + + +@pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) +def test_read_struct_exception_handling(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: + decoder = decoder_class(b"\x18") + + def raise_err(struct: StructType) -> None: + raise TypeError("boom") + + struct = StructType(NestedField(1, "id", IntegerType(), required=True)) + # You can also pass in an arbitrary function that returns a struct + + with pytest.raises(ValueError) as exc_info: + _ = StructReader(((0, IntegerReader()),), raise_err, struct).read(decoder) # type: ignore + + assert "Unable to initialize struct:" in str(exc_info.value) diff --git a/tests/avro/test_resolver.py b/tests/avro/test_resolver.py index 26b44e8e23..b5388b5ebb 100644 --- 
a/tests/avro/test_resolver.py +++ b/tests/avro/test_resolver.py @@ -289,15 +289,15 @@ class Ints(Record): c: int = Field() d: Optional[int] = Field() - ints_schema = Schema( + MANIFEST_ENTRY_SCHEMA = Schema( NestedField(3, "c", IntegerType(), required=True), NestedField(4, "d", IntegerType(), required=False), ) - with AvroFile[Ints](PyArrowFileIO().new_input(tmp_avro_file), ints_schema, {-1: Ints}) as reader: + with AvroFile[Ints](PyArrowFileIO().new_input(tmp_avro_file), MANIFEST_ENTRY_SCHEMA, {-1: Ints}) as reader: records = list(reader) - assert repr(records) == "[Ints[3, None]]" + assert repr(records) == "[Ints[c=3, d=None]]" def test_resolver_initial_value() -> None: diff --git a/tests/avro/test_writer.py b/tests/avro/test_writer.py index c655156c2a..39b8ecc393 100644 --- a/tests/avro/test_writer.py +++ b/tests/avro/test_writer.py @@ -19,6 +19,9 @@ import io import struct from _decimal import Decimal +from typing import Dict, List + +import pytest from pyiceberg.avro.encoder import BinaryEncoder from pyiceberg.avro.resolver import construct_writer @@ -32,12 +35,9 @@ FloatWriter, IntegerWriter, StringWriter, - TimestampNanoWriter, - TimestamptzNanoWriter, TimestamptzWriter, TimestampWriter, TimeWriter, - UnknownWriter, UUIDWriter, ) from pyiceberg.typedef import Record @@ -54,14 +54,12 @@ LongType, MapType, NestedField, + PrimitiveType, StringType, StructType, - TimestampNanoType, TimestampType, - TimestamptzNanoType, TimestamptzType, TimeType, - UnknownType, UUIDType, ) @@ -116,18 +114,10 @@ def test_timestamp_writer() -> None: assert construct_writer(TimestampType()) == TimestampWriter() -def test_timestamp_ns_writer() -> None: - assert construct_writer(TimestampNanoType()) == TimestampNanoWriter() - - def test_timestamptz_writer() -> None: assert construct_writer(TimestamptzType()) == TimestamptzWriter() -def test_timestamptz_ns_writer() -> None: - assert construct_writer(TimestamptzNanoType()) == TimestamptzNanoWriter() - - def test_string_writer() -> None: assert construct_writer(StringType()) == StringWriter() @@ -137,7 +127,13 @@ def test_binary_writer() -> None: def test_unknown_type() -> None: - assert construct_writer(UnknownType()) == UnknownWriter() + class UnknownType(PrimitiveType): + root: str = "UnknownType" + + with pytest.raises(ValueError) as exc_info: + construct_writer(UnknownType()) + + assert "Unknown type:" in str(exc_info.value) def test_uuid_writer() -> None: @@ -151,11 +147,16 @@ def test_write_simple_struct() -> None: schema = StructType( NestedField(1, "id", IntegerType(), required=True), NestedField(2, "property", StringType(), required=True) ) - struct = Record(12, "awesome") + + class MyStruct(Record): + id: int + property: str + + my_struct = MyStruct(id=12, property="awesome") enc_str = b"awesome" - construct_writer(schema).write(encoder, struct) + construct_writer(schema).write(encoder, my_struct) assert output.getbuffer() == b"".join([b"\x18", zigzag_encode(len(enc_str)), enc_str]) @@ -169,13 +170,18 @@ def test_write_struct_with_dict() -> None: NestedField(2, "properties", MapType(3, IntegerType(), 4, IntegerType()), required=True), ) - struct = Record(12, {1: 2, 3: 4}) - construct_writer(schema).write(encoder, struct) + class MyStruct(Record): + id: int + properties: Dict[int, int] + + my_struct = MyStruct(id=12, properties={1: 2, 3: 4}) + + construct_writer(schema).write(encoder, my_struct) assert output.getbuffer() == b"".join( [ b"\x18", - zigzag_encode(len(struct[1])), + zigzag_encode(len(my_struct.properties)), zigzag_encode(1), 
zigzag_encode(2), zigzag_encode(3), @@ -194,14 +200,18 @@ def test_write_struct_with_list() -> None: NestedField(2, "properties", ListType(3, IntegerType()), required=True), ) - struct = Record(12, [1, 2, 3, 4]) + class MyStruct(Record): + id: int + properties: List[int] + + my_struct = MyStruct(id=12, properties=[1, 2, 3, 4]) - construct_writer(schema).write(encoder, struct) + construct_writer(schema).write(encoder, my_struct) assert output.getbuffer() == b"".join( [ b"\x18", - zigzag_encode(len(struct[1])), + zigzag_encode(len(my_struct.properties)), zigzag_encode(1), zigzag_encode(2), zigzag_encode(3), diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py index 2ab97b4285..c00f4fde95 100644 --- a/tests/catalog/test_base.py +++ b/tests/catalog/test_base.py @@ -88,10 +88,6 @@ def given_catalog_has_a_table( ) -def test_load_catalog_in_memory() -> None: - assert load_catalog("catalog", type="in-memory") - - def test_load_catalog_impl_not_full_path() -> None: with pytest.raises(ValueError) as exc_info: load_catalog("catalog", **{"py-catalog-impl": "CustomCatalog"}) @@ -318,7 +314,7 @@ def test_rename_table(catalog: InMemoryCatalog) -> None: assert table._identifier == Catalog.identifier_to_tuple(new_table) # And - assert catalog._namespace_exists(table._identifier[:-1]) + assert ("new", "namespace") in catalog.list_namespaces() # And with pytest.raises(NoSuchTableError, match=NO_SUCH_TABLE_ERROR): @@ -342,7 +338,7 @@ def test_rename_table_from_self_identifier(catalog: InMemoryCatalog) -> None: assert new_table._identifier == Catalog.identifier_to_tuple(new_table_name) # And - assert catalog._namespace_exists(new_table._identifier[:-1]) + assert ("new", "namespace") in catalog.list_namespaces() # And with pytest.raises(NoSuchTableError, match=NO_SUCH_TABLE_ERROR): @@ -356,7 +352,7 @@ def test_create_namespace(catalog: InMemoryCatalog) -> None: catalog.create_namespace(TEST_TABLE_NAMESPACE, TEST_TABLE_PROPERTIES) # Then - assert catalog._namespace_exists(TEST_TABLE_NAMESPACE) + assert TEST_TABLE_NAMESPACE in catalog.list_namespaces() assert TEST_TABLE_PROPERTIES == catalog.load_namespace_properties(TEST_TABLE_NAMESPACE) @@ -379,12 +375,7 @@ def test_list_namespaces(catalog: InMemoryCatalog) -> None: # When namespaces = catalog.list_namespaces() # Then - assert TEST_TABLE_NAMESPACE[:1] in namespaces - - # When - namespaces = catalog.list_namespaces(TEST_TABLE_NAMESPACE) - # Then - assert not namespaces + assert TEST_TABLE_NAMESPACE in namespaces def test_drop_namespace(catalog: InMemoryCatalog) -> None: @@ -393,7 +384,7 @@ def test_drop_namespace(catalog: InMemoryCatalog) -> None: # When catalog.drop_namespace(TEST_TABLE_NAMESPACE) # Then - assert not catalog._namespace_exists(TEST_TABLE_NAMESPACE) + assert TEST_TABLE_NAMESPACE not in catalog.list_namespaces() def test_drop_namespace_raises_error_when_namespace_does_not_exist(catalog: InMemoryCatalog) -> None: @@ -442,7 +433,7 @@ def test_update_namespace_metadata(catalog: InMemoryCatalog) -> None: summary = catalog.update_namespace_properties(TEST_TABLE_NAMESPACE, updates=new_metadata) # Then - assert catalog._namespace_exists(TEST_TABLE_NAMESPACE) + assert TEST_TABLE_NAMESPACE in catalog.list_namespaces() assert new_metadata.items() <= catalog.load_namespace_properties(TEST_TABLE_NAMESPACE).items() assert summary.removed == [] assert sorted(summary.updated) == ["key3", "key4"] @@ -459,7 +450,7 @@ def test_update_namespace_metadata_removals(catalog: InMemoryCatalog) -> None: summary = 
catalog.update_namespace_properties(TEST_TABLE_NAMESPACE, remove_metadata, new_metadata) # Then - assert catalog._namespace_exists(TEST_TABLE_NAMESPACE) + assert TEST_TABLE_NAMESPACE in catalog.list_namespaces() assert new_metadata.items() <= catalog.load_namespace_properties(TEST_TABLE_NAMESPACE).items() assert remove_metadata.isdisjoint(catalog.load_namespace_properties(TEST_TABLE_NAMESPACE).keys()) assert summary.removed == ["key1"] diff --git a/tests/catalog/test_dynamodb.py b/tests/catalog/test_dynamodb.py index c7c39a600d..7ab875af90 100644 --- a/tests/catalog/test_dynamodb.py +++ b/tests/catalog/test_dynamodb.py @@ -626,11 +626,3 @@ def test_table_exists( assert test_catalog.table_exists(identifier) is True # Act and Assert for an non-existing table assert test_catalog.table_exists(("non", "exist")) is False - - -@mock_aws -def test_dynamodb_client_override() -> None: - catalog_name = "glue" - test_client = boto3.client("dynamodb", region_name="us-west-2") - test_catalog = DynamoDbCatalog(catalog_name, test_client) - assert test_catalog.dynamodb is test_client diff --git a/tests/catalog/test_glue.py b/tests/catalog/test_glue.py index 0ff43cd52b..eabbffb378 100644 --- a/tests/catalog/test_glue.py +++ b/tests/catalog/test_glue.py @@ -548,18 +548,6 @@ def test_drop_non_empty_namespace( test_catalog.drop_namespace(database_name) -@mock_aws -def test_drop_namespace_that_contains_non_iceberg_tables( - _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str -) -> None: - test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}/"}) - test_catalog.create_namespace(namespace=database_name) - test_catalog.glue.create_table(DatabaseName=database_name, TableInput={"Name": "hive_table"}) - - with pytest.raises(NamespaceNotEmptyError): - test_catalog.drop_namespace(database_name) - - @mock_aws def test_drop_non_exist_namespace(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) @@ -791,8 +779,6 @@ def test_commit_table_properties( Name=table_name, ) assert table_info["Table"]["Description"] == "test_description" - assert table_info["Table"]["Parameters"]["test_a"] == "test_aa" - assert table_info["Table"]["Parameters"]["test_c"] == "test_c" @mock_aws @@ -946,11 +932,3 @@ def test_glue_endpoint_override(_bucket_initialize: None, moto_endpoint_url: str catalog_name, **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}", "glue.endpoint": test_endpoint} ) assert test_catalog.glue.meta.endpoint_url == test_endpoint - - -@mock_aws -def test_glue_client_override() -> None: - catalog_name = "glue" - test_client = boto3.client("glue", region_name="us-west-2") - test_catalog = GlueCatalog(catalog_name, test_client) - assert test_catalog.glue is test_client diff --git a/tests/catalog/test_hive.py b/tests/catalog/test_hive.py index 1edb4f7295..07cd79d4c7 100644 --- a/tests/catalog/test_hive.py +++ b/tests/catalog/test_hive.py @@ -15,21 +15,14 @@ # specific language governing permissions and limitations # under the License. 
# pylint: disable=protected-access,redefined-outer-name -import base64 import copy -import struct -import threading import uuid -from collections.abc import Generator from copy import deepcopy -from typing import Optional from unittest.mock import MagicMock, call, patch import pytest -import thrift.transport.TSocket from hive_metastore.ttypes import ( AlreadyExistsException, - EnvironmentContext, FieldSchema, InvalidOperationException, LockResponse, @@ -45,16 +38,11 @@ from pyiceberg.catalog import PropertiesUpdateSummary from pyiceberg.catalog.hive import ( - DO_NOT_UPDATE_STATS, - DO_NOT_UPDATE_STATS_DEFAULT, - HIVE_KERBEROS_AUTH, - HIVE_KERBEROS_SERVICE_NAME, LOCK_CHECK_MAX_WAIT_TIME, LOCK_CHECK_MIN_WAIT_TIME, LOCK_CHECK_RETRIES, HiveCatalog, _construct_hive_storage_descriptor, - _HiveClient, ) from pyiceberg.exceptions import ( NamespaceAlreadyExistsError, @@ -195,59 +183,6 @@ def hive_database(tmp_path_factory: pytest.TempPathFactory) -> HiveDatabase: ) -class SaslServer(threading.Thread): - def __init__(self, socket: thrift.transport.TSocket.TServerSocket, response: bytes) -> None: - super().__init__() - self.daemon = True - self._socket = socket - self._response = response - self._port = None - self._port_bound = threading.Event() - - def run(self) -> None: - self._socket.listen() - - try: - address = self._socket.handle.getsockname() - # AF_INET addresses are 2-tuples (host, port) and AF_INET6 are - # 4-tuples (host, port, ...), i.e. port is always at index 1. - _host, self._port, *_ = address - finally: - self._port_bound.set() - - # Accept connections and respond to each connection with the same message. - # The responsibility for closing the connection is on the client - while True: - try: - client = self._socket.accept() - if client: - client.write(self._response) - client.flush() - except Exception: - pass - - @property - def port(self) -> Optional[int]: - self._port_bound.wait() - return self._port - - def close(self) -> None: - self._socket.close() - - -@pytest.fixture(scope="session") -def kerberized_hive_metastore_fake_url() -> Generator[str, None, None]: - server = SaslServer( - # Port 0 means pick any available port. - socket=thrift.transport.TSocket.TServerSocket(port=0), - # Always return a message with status 5 (COMPLETE). 
- response=struct.pack(">BI", 5, 0), - ) - server.start() - yield f"thrift://localhost:{server.port}" - server.close() - - def test_no_uri_supplied() -> None: with pytest.raises(KeyError): HiveCatalog("production") @@ -343,13 +278,7 @@ def test_create_table( storedAsSubDirectories=None, ), partitionKeys=None, - parameters={ - "EXTERNAL": "TRUE", - "table_type": "ICEBERG", - "metadata_location": metadata_location, - "write.parquet.compression-codec": "zstd", - "owner": "javaberg", - }, + parameters={"EXTERNAL": "TRUE", "table_type": "ICEBERG", "metadata_location": metadata_location}, viewOriginalText=None, viewExpandedText=None, tableType="EXTERNAL_TABLE", @@ -524,13 +453,7 @@ def test_create_table_with_given_location_removes_trailing_slash( storedAsSubDirectories=None, ), partitionKeys=None, - parameters={ - "EXTERNAL": "TRUE", - "table_type": "ICEBERG", - "metadata_location": metadata_location, - "write.parquet.compression-codec": "zstd", - "owner": "javaberg", - }, + parameters={"EXTERNAL": "TRUE", "table_type": "ICEBERG", "metadata_location": metadata_location}, viewOriginalText=None, viewExpandedText=None, tableType="EXTERNAL_TABLE", @@ -890,7 +813,7 @@ def test_rename_table(hive_table: HiveTable) -> None: catalog._client = MagicMock() catalog._client.__enter__().get_table.side_effect = [hive_table, renamed_table] - catalog._client.__enter__().alter_table_with_environment_context.return_value = None + catalog._client.__enter__().alter_table.return_value = None from_identifier = ("default", "new_tabl2e") to_identifier = ("default", "new_tabl3e") @@ -900,12 +823,7 @@ def test_rename_table(hive_table: HiveTable) -> None: calls = [call(dbname="default", tbl_name="new_tabl2e"), call(dbname="default", tbl_name="new_tabl3e")] catalog._client.__enter__().get_table.assert_has_calls(calls) - catalog._client.__enter__().alter_table_with_environment_context.assert_called_with( - dbname="default", - tbl_name="new_tabl2e", - new_tbl=renamed_table, - environment_context=EnvironmentContext(properties={DO_NOT_UPDATE_STATS: DO_NOT_UPDATE_STATS_DEFAULT}), - ) + catalog._client.__enter__().alter_table.assert_called_with(dbname="default", tbl_name="new_tabl2e", new_tbl=renamed_table) def test_rename_table_from_self_identifier(hive_table: HiveTable) -> None: @@ -923,7 +841,7 @@ def test_rename_table_from_self_identifier(hive_table: HiveTable) -> None: renamed_table.tableName = "new_tabl3e" catalog._client.__enter__().get_table.side_effect = [hive_table, renamed_table] - catalog._client.__enter__().alter_table_with_environment_context.return_value = None + catalog._client.__enter__().alter_table.return_value = None to_identifier = ("default", "new_tabl3e") table = catalog.rename_table(from_table.name(), to_identifier) @@ -931,19 +849,14 @@ def test_rename_table_from_self_identifier(hive_table: HiveTable) -> None: calls = [call(dbname="default", tbl_name="new_tabl2e"), call(dbname="default", tbl_name="new_tabl3e")] catalog._client.__enter__().get_table.assert_has_calls(calls) - catalog._client.__enter__().alter_table_with_environment_context.assert_called_with( - dbname="default", - tbl_name="new_tabl2e", - new_tbl=renamed_table, - environment_context=EnvironmentContext(properties={DO_NOT_UPDATE_STATS: DO_NOT_UPDATE_STATS_DEFAULT}), - ) + catalog._client.__enter__().alter_table.assert_called_with(dbname="default", tbl_name="new_tabl2e", new_tbl=renamed_table) def test_rename_table_from_does_not_exists() -> None: catalog = HiveCatalog(HIVE_CATALOG_NAME, uri=HIVE_METASTORE_FAKE_URL) catalog._client = 
MagicMock() - catalog._client.__enter__().alter_table_with_environment_context.side_effect = NoSuchObjectException( + catalog._client.__enter__().alter_table.side_effect = NoSuchObjectException( message="hive.default.does_not_exists table not found" ) @@ -957,7 +870,7 @@ def test_rename_table_to_namespace_does_not_exists() -> None: catalog = HiveCatalog(HIVE_CATALOG_NAME, uri=HIVE_METASTORE_FAKE_URL) catalog._client = MagicMock() - catalog._client.__enter__().alter_table_with_environment_context.side_effect = InvalidOperationException( + catalog._client.__enter__().alter_table.side_effect = InvalidOperationException( message="Unable to change partition or table. Database default does not exist Check metastore logs for detailed stack.does_not_exists" ) @@ -1156,7 +1069,7 @@ def test_update_namespace_properties(hive_database: HiveDatabase) -> None: name="default", description=None, locationUri=hive_database.locationUri, - parameters={"label": "core"}, + parameters={"test": None, "label": "core"}, privileges=None, ownerName=None, ownerType=1, @@ -1301,20 +1214,7 @@ def test_create_hive_client_success() -> None: with patch("pyiceberg.catalog.hive._HiveClient", return_value=MagicMock()) as mock_hive_client: client = HiveCatalog._create_hive_client(properties) - mock_hive_client.assert_called_once_with("thrift://localhost:10000", "user", False, "hive") - assert client is not None - - -def test_create_hive_client_with_kerberos_success() -> None: - properties = { - "uri": "thrift://localhost:10000", - "ugi": "user", - HIVE_KERBEROS_AUTH: "true", - HIVE_KERBEROS_SERVICE_NAME: "hiveuser", - } - with patch("pyiceberg.catalog.hive._HiveClient", return_value=MagicMock()) as mock_hive_client: - client = HiveCatalog._create_hive_client(properties) - mock_hive_client.assert_called_once_with("thrift://localhost:10000", "user", True, "hiveuser") + mock_hive_client.assert_called_once_with("thrift://localhost:10000", "user", False) assert client is not None @@ -1327,7 +1227,7 @@ def test_create_hive_client_multiple_uris() -> None: client = HiveCatalog._create_hive_client(properties) assert mock_hive_client.call_count == 2 mock_hive_client.assert_has_calls( - [call("thrift://localhost:10000", "user", False, "hive"), call("thrift://localhost:10001", "user", False, "hive")] + [call("thrift://localhost:10000", "user", False), call("thrift://localhost:10001", "user", False)] ) assert client is not None @@ -1339,45 +1239,3 @@ def test_create_hive_client_failure() -> None: with pytest.raises(Exception, match="Connection failed"): HiveCatalog._create_hive_client(properties) assert mock_hive_client.call_count == 2 - - -def test_create_hive_client_with_kerberos( - kerberized_hive_metastore_fake_url: str, -) -> None: - properties = { - "uri": kerberized_hive_metastore_fake_url, - "ugi": "user", - HIVE_KERBEROS_AUTH: "true", - } - client = HiveCatalog._create_hive_client(properties) - assert client is not None - - -def test_create_hive_client_with_kerberos_using_context_manager( - kerberized_hive_metastore_fake_url: str, -) -> None: - client = _HiveClient( - uri=kerberized_hive_metastore_fake_url, - kerberos_auth=True, - ) - with ( - patch( - "puresasl.mechanisms.kerberos.authGSSClientStep", - return_value=None, - ), - patch( - "puresasl.mechanisms.kerberos.authGSSClientResponse", - return_value=base64.b64encode(b"Some Response"), - ), - patch( - "puresasl.mechanisms.GSSAPIMechanism.complete", - return_value=True, - ), - ): - with client as open_client: - assert open_client._iprot.trans.isOpen() - - # Use the context 
manager a second time to see if - # closing and re-opening work as expected. - with client as open_client: - assert open_client._iprot.trans.isOpen() diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index dcabd0a2ea..4ad825f5b7 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. # pylint: disable=redefined-outer-name,unused-argument -import base64 import os from typing import Any, Callable, Dict, cast from unittest import mock @@ -25,7 +24,7 @@ import pyiceberg from pyiceberg.catalog import PropertiesUpdateSummary, load_catalog -from pyiceberg.catalog.rest import OAUTH2_SERVER_URI, SNAPSHOT_LOADING_MODE, RestCatalog +from pyiceberg.catalog.rest import OAUTH2_SERVER_URI, RestCatalog from pyiceberg.exceptions import ( AuthorizationExpiredError, NamespaceAlreadyExistsError, @@ -324,19 +323,19 @@ def test_properties_sets_headers(requests_mock: Mocker) -> None: **{"header.Content-Type": "application/vnd.api+json", "header.Customized-Header": "some/value"}, ) - assert catalog._session.headers.get("Content-type") == "application/json", ( - "Expected 'Content-Type' default header not to be overwritten" - ) - assert requests_mock.last_request.headers["Content-type"] == "application/json", ( - "Config request did not include expected 'Content-Type' header" - ) + assert ( + catalog._session.headers.get("Content-type") == "application/json" + ), "Expected 'Content-Type' default header not to be overwritten" + assert ( + requests_mock.last_request.headers["Content-type"] == "application/json" + ), "Config request did not include expected 'Content-Type' header" - assert catalog._session.headers.get("Customized-Header") == "some/value", ( - "Expected 'Customized-Header' header to be 'some/value'" - ) - assert requests_mock.last_request.headers["Customized-Header"] == "some/value", ( - "Config request did not include expected 'Customized-Header' header" - ) + assert ( + catalog._session.headers.get("Customized-Header") == "some/value" + ), "Expected 'Customized-Header' header to be 'some/value'" + assert ( + requests_mock.last_request.headers["Customized-Header"] == "some/value" + ), "Config request did not include expected 'Customized-Header' header" def test_config_sets_headers(requests_mock: Mocker) -> None: @@ -353,19 +352,19 @@ def test_config_sets_headers(requests_mock: Mocker) -> None: catalog = RestCatalog("rest", uri=TEST_URI, warehouse="s3://some-bucket") catalog.create_namespace(namespace) - assert catalog._session.headers.get("Content-type") == "application/json", ( - "Expected 'Content-Type' default header not to be overwritten" - ) - assert requests_mock.last_request.headers["Content-type"] == "application/json", ( - "Create namespace request did not include expected 'Content-Type' header" - ) + assert ( + catalog._session.headers.get("Content-type") == "application/json" + ), "Expected 'Content-Type' default header not to be overwritten" + assert ( + requests_mock.last_request.headers["Content-type"] == "application/json" + ), "Create namespace request did not include expected 'Content-Type' header" - assert catalog._session.headers.get("Customized-Header") == "some/value", ( - "Expected 'Customized-Header' header to be 'some/value'" - ) - assert requests_mock.last_request.headers["Customized-Header"] == "some/value", ( - "Create namespace request did not include expected 'Customized-Header' header" - ) + assert ( + 
catalog._session.headers.get("Customized-Header") == "some/value" + ), "Expected 'Customized-Header' header to be 'some/value'" + assert ( + requests_mock.last_request.headers["Customized-Header"] == "some/value" + ), "Create namespace request did not include expected 'Customized-Header' header" @pytest.mark.filterwarnings( @@ -556,29 +555,10 @@ def test_list_namespace_with_parent_200(rest_mock: Mocker) -> None: ] -def test_list_namespace_with_parent_404(rest_mock: Mocker) -> None: - rest_mock.get( - f"{TEST_URI}v1/namespaces?parent=some_namespace", - json={ - "error": { - "message": "Namespace provided in the `parent` query parameter is not found", - "type": "NoSuchNamespaceException", - "code": 404, - } - }, - status_code=404, - request_headers=TEST_HEADERS, - ) - - with pytest.raises(NoSuchNamespaceError): - RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN).list_namespaces(("some_namespace",)) - - @pytest.mark.filterwarnings( "ignore:Deprecated in 0.8.0, will be removed in 1.0.0. Iceberg REST client is missing the OAuth2 server URI:DeprecationWarning" ) -@pytest.mark.parametrize("status_code", [401, 419]) -def test_list_namespaces_token_expired_success_on_retries(rest_mock: Mocker, status_code: int) -> None: +def test_list_namespaces_token_expired(rest_mock: Mocker) -> None: new_token = "new_jwt_token" new_header = dict(TEST_HEADERS) new_header["Authorization"] = f"Bearer {new_token}" @@ -588,12 +568,12 @@ def test_list_namespaces_token_expired_success_on_retries(rest_mock: Mocker, sta f"{TEST_URI}v1/namespaces", [ { - "status_code": status_code, + "status_code": 419, "json": { "error": { "message": "Authorization expired.", "type": "AuthorizationExpiredError", - "code": status_code, + "code": 419, } }, "headers": TEST_HEADERS, @@ -621,10 +601,6 @@ def test_list_namespaces_token_expired_success_on_retries(rest_mock: Mocker, sta status_code=200, ) catalog = RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN, credential=TEST_CREDENTIALS) - # LegacyOAuth2AuthManager is created twice through `_create_session()` - # which results in the token being refreshed twice when the RestCatalog is initialized. 
- assert tokens.call_count == 2 - assert catalog.list_namespaces() == [ ("default",), ("examples",), @@ -632,7 +608,7 @@ def test_list_namespaces_token_expired_success_on_retries(rest_mock: Mocker, sta ("system",), ] assert namespaces.call_count == 2 - assert tokens.call_count == 3 + assert tokens.call_count == 1 assert catalog.list_namespaces() == [ ("default",), @@ -641,7 +617,7 @@ def test_list_namespaces_token_expired_success_on_retries(rest_mock: Mocker, sta ("system",), ] assert namespaces.call_count == 3 - assert tokens.call_count == 3 + assert tokens.call_count == 1 def test_create_namespace_200(rest_mock: Mocker) -> None: @@ -858,29 +834,6 @@ def test_load_table_200(rest_mock: Mocker, example_table_metadata_with_snapshot_ assert actual == expected -def test_load_table_200_loading_mode( - rest_mock: Mocker, example_table_metadata_with_snapshot_v1_rest_json: Dict[str, Any] -) -> None: - rest_mock.get( - f"{TEST_URI}v1/namespaces/fokko/tables/table?snapshots=refs", - json=example_table_metadata_with_snapshot_v1_rest_json, - status_code=200, - request_headers=TEST_HEADERS, - ) - catalog = RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN, **{SNAPSHOT_LOADING_MODE: "refs"}) - actual = catalog.load_table(("fokko", "table")) - expected = Table( - identifier=("fokko", "table"), - metadata_location=example_table_metadata_with_snapshot_v1_rest_json["metadata-location"], - metadata=TableMetadataV1(**example_table_metadata_with_snapshot_v1_rest_json["metadata"]), - io=load_file_io(), - catalog=catalog, - ) - # First compare the dicts - assert actual.metadata.model_dump() == expected.metadata.model_dump() - assert actual == expected - - def test_load_table_honor_access_delegation( rest_mock: Mocker, example_table_metadata_with_snapshot_v1_rest_json: Dict[str, Any] ) -> None: @@ -1520,132 +1473,6 @@ def test_request_session_with_ssl_client_cert() -> None: assert "Could not find the TLS certificate file, invalid path: path_to_client_cert" in str(e.value) -def test_rest_catalog_with_basic_auth_type(rest_mock: Mocker) -> None: - # Given - rest_mock.get( - f"{TEST_URI}v1/config", - json={"defaults": {}, "overrides": {}}, - status_code=200, - ) - # Given - catalog_properties = { - "uri": TEST_URI, - "auth": { - "type": "basic", - "basic": { - "username": "one", - "password": "two", - }, - }, - } - catalog = RestCatalog("rest", **catalog_properties) # type: ignore - assert catalog.uri == TEST_URI - - encoded_user_pass = base64.b64encode(b"one:two").decode() - expected_auth_header = f"Basic {encoded_user_pass}" - assert rest_mock.last_request.headers["Authorization"] == expected_auth_header - - -def test_rest_catalog_with_custom_auth_type() -> None: - # Given - catalog_properties = { - "uri": TEST_URI, - "auth": { - "type": "custom", - "impl": "dummy.nonexistent.package", - "custom": { - "property1": "one", - "property2": "two", - }, - }, - } - with pytest.raises(ValueError) as e: - # Missing namespace - RestCatalog("rest", **catalog_properties) # type: ignore - assert "Could not load AuthManager class for 'dummy.nonexistent.package'" in str(e.value) - - -def test_rest_catalog_with_custom_basic_auth_type(rest_mock: Mocker) -> None: - # Given - catalog_properties = { - "uri": TEST_URI, - "auth": { - "type": "custom", - "impl": "pyiceberg.catalog.rest.auth.BasicAuthManager", - "custom": { - "username": "one", - "password": "two", - }, - }, - } - rest_mock.get( - f"{TEST_URI}v1/config", - json={"defaults": {}, "overrides": {}}, - status_code=200, - ) - catalog = RestCatalog("rest", **catalog_properties) 
# type: ignore - assert catalog.uri == TEST_URI - - encoded_user_pass = base64.b64encode(b"one:two").decode() - expected_auth_header = f"Basic {encoded_user_pass}" - assert rest_mock.last_request.headers["Authorization"] == expected_auth_header - - -def test_rest_catalog_with_custom_auth_type_no_impl() -> None: - # Given - catalog_properties = { - "uri": TEST_URI, - "auth": { - "type": "custom", - "custom": { - "property1": "one", - "property2": "two", - }, - }, - } - with pytest.raises(ValueError) as e: - # Missing namespace - RestCatalog("rest", **catalog_properties) # type: ignore - assert "auth.impl must be specified when using custom auth.type" in str(e.value) - - -def test_rest_catalog_with_non_custom_auth_type_impl() -> None: - # Given - catalog_properties = { - "uri": TEST_URI, - "auth": { - "type": "basic", - "impl": "basic.package", - "basic": { - "username": "one", - "password": "two", - }, - }, - } - with pytest.raises(ValueError) as e: - # Missing namespace - RestCatalog("rest", **catalog_properties) # type: ignore - assert "auth.impl can only be specified when using custom auth.type" in str(e.value) - - -def test_rest_catalog_with_unsupported_auth_type() -> None: - # Given - catalog_properties = { - "uri": TEST_URI, - "auth": { - "type": "unsupported", - "unsupported": { - "property1": "one", - "property2": "two", - }, - }, - } - with pytest.raises(ValueError) as e: - # Missing namespace - RestCatalog("rest", **catalog_properties) # type: ignore - assert "Could not load AuthManager class for 'unsupported'" in str(e.value) - - EXAMPLE_ENV = {"PYICEBERG_CATALOG__PRODUCTION__URI": TEST_URI} @@ -1748,42 +1575,3 @@ def test_drop_view_204(rest_mock: Mocker) -> None: request_headers=TEST_HEADERS, ) RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN).drop_view(("some_namespace", "some_view")) - - -@mock.patch("google.auth.transport.requests.Request") -@mock.patch("google.auth.load_credentials_from_file") -def test_rest_catalog_with_google_credentials_path( - mock_load_creds: mock.MagicMock, mock_google_request: mock.MagicMock, rest_mock: Mocker -) -> None: - mock_credentials = mock.MagicMock() - mock_credentials.token = "file_token" - mock_load_creds.return_value = (mock_credentials, "test_project_file") - - # Given - rest_mock.get( - f"{TEST_URI}v1/config", - json={"defaults": {}, "overrides": {}}, - status_code=200, - ) - # Given - catalog_properties = { - "uri": TEST_URI, - "auth": { - "type": "google", - "google": { - "credentials_path": "/fake/path.json", - }, - }, - } - catalog = RestCatalog("rest", **catalog_properties) # type: ignore - assert catalog.uri == TEST_URI - - expected_auth_header = "Bearer file_token" - assert rest_mock.last_request.headers["Authorization"] == expected_auth_header - - mock_load_creds.assert_called_with("/fake/path.json", scopes=None) - mock_credentials.refresh.assert_called_once_with(mock_google_request.return_value) - history = rest_mock.request_history - assert len(history) == 1 - actual_headers = history[0].headers - assert actual_headers["Authorization"] == expected_auth_header diff --git a/tests/catalog/test_rest_auth.py b/tests/catalog/test_rest_auth.py deleted file mode 100644 index 2ef02ed005..0000000000 --- a/tests/catalog/test_rest_auth.py +++ /dev/null @@ -1,155 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import base64 -from unittest.mock import MagicMock, patch - -import pytest -import requests -from requests_mock import Mocker - -from pyiceberg.catalog.rest.auth import AuthManagerAdapter, BasicAuthManager, GoogleAuthManager, NoopAuthManager - -TEST_URI = "https://iceberg-test-catalog/" -GOOGLE_CREDS_URI = "https://oauth2.googleapis.com/token" - - -@pytest.fixture -def rest_mock(requests_mock: Mocker) -> Mocker: - requests_mock.get( - TEST_URI, - json={}, - status_code=200, - ) - return requests_mock - - -@pytest.fixture -def google_mock(requests_mock: Mocker) -> Mocker: - requests_mock.post(GOOGLE_CREDS_URI, json={"access_token": "aaaabbb"}, status_code=200) - requests_mock.get( - TEST_URI, - json={}, - status_code=200, - ) - return requests_mock - - -def test_noop_auth_header(rest_mock: Mocker) -> None: - auth_manager = NoopAuthManager() - session = requests.Session() - session.auth = AuthManagerAdapter(auth_manager) - - session.get(TEST_URI) - history = rest_mock.request_history - assert len(history) == 1 - actual_headers = history[0].headers - assert "Authorization" not in actual_headers - - -def test_basic_auth_header(rest_mock: Mocker) -> None: - username = "testuser" - password = "testpassword" - expected_token = base64.b64encode(f"{username}:{password}".encode()).decode() - expected_header = f"Basic {expected_token}" - - auth_manager = BasicAuthManager(username=username, password=password) - session = requests.Session() - session.auth = AuthManagerAdapter(auth_manager) - - session.get(TEST_URI) - history = rest_mock.request_history - assert len(history) == 1 - actual_headers = history[0].headers - assert actual_headers["Authorization"] == expected_header - - -@patch("google.auth.transport.requests.Request") -@patch("google.auth.default") -def test_google_auth_manager_default_credentials( - mock_google_auth_default: MagicMock, mock_google_request: MagicMock, rest_mock: Mocker -) -> None: - """Test GoogleAuthManager with default application credentials.""" - mock_credentials = MagicMock() - mock_credentials.token = "test_token" - mock_google_auth_default.return_value = (mock_credentials, "test_project") - - auth_manager = GoogleAuthManager() - session = requests.Session() - session.auth = AuthManagerAdapter(auth_manager) - session.get(TEST_URI) - - mock_google_auth_default.assert_called_once_with(scopes=None) - mock_credentials.refresh.assert_called_once_with(mock_google_request.return_value) - history = rest_mock.request_history - assert len(history) == 1 - actual_headers = history[0].headers - assert actual_headers["Authorization"] == "Bearer test_token" - - -@patch("google.auth.transport.requests.Request") -@patch("google.auth.load_credentials_from_file") -def test_google_auth_manager_with_credentials_file( - mock_load_creds: MagicMock, mock_google_request: MagicMock, rest_mock: Mocker -) -> None: - """Test GoogleAuthManager with a credentials file path.""" - mock_credentials = MagicMock() - 
mock_credentials.token = "file_token" - mock_load_creds.return_value = (mock_credentials, "test_project_file") - - auth_manager = GoogleAuthManager(credentials_path="/fake/path.json") - session = requests.Session() - session.auth = AuthManagerAdapter(auth_manager) - session.get(TEST_URI) - - mock_load_creds.assert_called_once_with("/fake/path.json", scopes=None) - mock_credentials.refresh.assert_called_once_with(mock_google_request.return_value) - history = rest_mock.request_history - assert len(history) == 1 - actual_headers = history[0].headers - assert actual_headers["Authorization"] == "Bearer file_token" - - -@patch("google.auth.transport.requests.Request") -@patch("google.auth.load_credentials_from_file") -def test_google_auth_manager_with_credentials_file_and_scopes( - mock_load_creds: MagicMock, mock_google_request: MagicMock, rest_mock: Mocker -) -> None: - """Test GoogleAuthManager with a credentials file path and scopes.""" - mock_credentials = MagicMock() - mock_credentials.token = "scoped_token" - mock_load_creds.return_value = (mock_credentials, "test_project_scoped") - scopes = ["https://www.googleapis.com/auth/bigquery"] - - auth_manager = GoogleAuthManager(credentials_path="/fake/path.json", scopes=scopes) - session = requests.Session() - session.auth = AuthManagerAdapter(auth_manager) - session.get(TEST_URI) - - mock_load_creds.assert_called_once_with("/fake/path.json", scopes=scopes) - mock_credentials.refresh.assert_called_once_with(mock_google_request.return_value) - history = rest_mock.request_history - assert len(history) == 1 - actual_headers = history[0].headers - assert actual_headers["Authorization"] == "Bearer scoped_token" - - -def test_google_auth_manager_import_error() -> None: - """Test GoogleAuthManager raises ImportError if google-auth is not installed.""" - with patch.dict("sys.modules", {"google.auth": None, "google.auth.transport.requests": None}): - with pytest.raises(ImportError, match="Google Auth libraries not found. 
Please install 'google-auth'."): - GoogleAuthManager() diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index 235951484f..33a76f7308 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -17,7 +17,7 @@ import os from pathlib import Path -from typing import Any, Generator, cast +from typing import Any, Generator, List, cast import pyarrow as pa import pytest @@ -72,7 +72,7 @@ def catalog_name() -> str: @pytest.fixture(name="random_table_identifier") def fixture_random_table_identifier(warehouse: Path, database_name: str, table_name: str) -> Identifier: - os.makedirs(f"{warehouse}/{database_name}/{table_name}/metadata/", exist_ok=True) + os.makedirs(f"{warehouse}/{database_name}.db/{table_name}/metadata/", exist_ok=True) return database_name, table_name @@ -80,13 +80,13 @@ def fixture_random_table_identifier(warehouse: Path, database_name: str, table_n def fixture_another_random_table_identifier(warehouse: Path, database_name: str, table_name: str) -> Identifier: database_name = database_name + "_new" table_name = table_name + "_new" - os.makedirs(f"{warehouse}/{database_name}/{table_name}/metadata/", exist_ok=True) + os.makedirs(f"{warehouse}/{database_name}.db/{table_name}/metadata/", exist_ok=True) return database_name, table_name @pytest.fixture(name="random_hierarchical_identifier") def fixture_random_hierarchical_identifier(warehouse: Path, hierarchical_namespace_name: str, table_name: str) -> Identifier: - os.makedirs(f"{warehouse}/{hierarchical_namespace_name}/{table_name}/metadata/", exist_ok=True) + os.makedirs(f"{warehouse}/{hierarchical_namespace_name}.db/{table_name}/metadata/", exist_ok=True) return Catalog.identifier_to_tuple(".".join((hierarchical_namespace_name, table_name))) @@ -96,7 +96,7 @@ def fixture_another_random_hierarchical_identifier( ) -> Identifier: hierarchical_namespace_name = hierarchical_namespace_name + "_new" table_name = table_name + "_new" - os.makedirs(f"{warehouse}/{hierarchical_namespace_name}/{table_name}/metadata/", exist_ok=True) + os.makedirs(f"{warehouse}/{hierarchical_namespace_name}.db/{table_name}/metadata/", exist_ok=True) return Catalog.identifier_to_tuple(".".join((hierarchical_namespace_name, table_name))) @@ -115,7 +115,7 @@ def catalog_memory(catalog_name: str, warehouse: Path) -> Generator[SqlCatalog, @pytest.fixture(scope="module") def catalog_sqlite(catalog_name: str, warehouse: Path) -> Generator[SqlCatalog, None, None]: props = { - "uri": f"sqlite:////{warehouse}/sql-catalog", + "uri": f"sqlite:////{warehouse}/sql-catalog.db", "warehouse": f"file://{warehouse}", } catalog = SqlCatalog(catalog_name, **props) @@ -126,7 +126,7 @@ def catalog_sqlite(catalog_name: str, warehouse: Path) -> Generator[SqlCatalog, @pytest.fixture(scope="module") def catalog_uri(warehouse: Path) -> str: - return f"sqlite:////{warehouse}/sql-catalog" + return f"sqlite:////{warehouse}/sql-catalog.db" @pytest.fixture(scope="module") @@ -137,7 +137,7 @@ def alchemy_engine(catalog_uri: str) -> Engine: @pytest.fixture(scope="module") def catalog_sqlite_without_rowcount(catalog_name: str, warehouse: Path) -> Generator[SqlCatalog, None, None]: props = { - "uri": f"sqlite:////{warehouse}/sql-catalog", + "uri": f"sqlite:////{warehouse}/sql-catalog.db", "warehouse": f"file://{warehouse}", } catalog = SqlCatalog(catalog_name, **props) @@ -150,7 +150,7 @@ def catalog_sqlite_without_rowcount(catalog_name: str, warehouse: Path) -> Gener @pytest.fixture(scope="module") def catalog_sqlite_fsspec(catalog_name: str, warehouse: Path) -> 
Generator[SqlCatalog, None, None]: props = { - "uri": f"sqlite:////{warehouse}/sql-catalog", + "uri": f"sqlite:////{warehouse}/sql-catalog.db", "warehouse": f"file://{warehouse}", PY_IO_IMPL: FSSPEC_FILE_IO, } @@ -176,7 +176,7 @@ def test_creation_with_echo_parameter(catalog_name: str, warehouse: Path) -> Non for echo_param, expected_echo_value in test_cases: props = { - "uri": f"sqlite:////{warehouse}/sql-catalog", + "uri": f"sqlite:////{warehouse}/sql-catalog.db", "warehouse": f"file://{warehouse}", } # None is for default value @@ -199,7 +199,7 @@ def test_creation_with_pool_pre_ping_parameter(catalog_name: str, warehouse: Pat for pool_pre_ping_param, expected_pool_pre_ping_value in test_cases: props = { - "uri": f"sqlite:////{warehouse}/sql-catalog", + "uri": f"sqlite:////{warehouse}/sql-catalog.db", "warehouse": f"file://{warehouse}", } # None is for default value @@ -219,7 +219,7 @@ def test_creation_from_impl(catalog_name: str, warehouse: Path) -> None: catalog_name, **{ "py-catalog-impl": "pyiceberg.catalog.sql.SqlCatalog", - "uri": f"sqlite:////{warehouse}/sql-catalog", + "uri": f"sqlite:////{warehouse}/sql-catalog.db", "warehouse": f"file://{warehouse}", }, ), @@ -493,7 +493,7 @@ def test_create_table_with_given_location_removes_trailing_slash( identifier_tuple = Catalog.identifier_to_tuple(table_identifier) namespace = Catalog.namespace_from(table_identifier) table_name = Catalog.table_name_from(identifier_tuple) - location = f"file://{warehouse}/{catalog.name}/{table_name}-given" + location = f"file://{warehouse}/{catalog.name}.db/{table_name}-given" catalog.create_namespace(namespace) catalog.create_table(table_identifier, table_schema_nested, location=f"{location}/") table = catalog.load_table(table_identifier) @@ -1027,7 +1027,7 @@ def test_create_namespace_if_not_exists(catalog: SqlCatalog, database_name: str) @pytest.mark.parametrize("namespace", [lazy_fixture("database_name"), lazy_fixture("hierarchical_namespace_name")]) def test_create_namespace(catalog: SqlCatalog, namespace: str) -> None: catalog.create_namespace(namespace) - assert (Catalog.identifier_to_tuple(namespace)[:1]) in catalog.list_namespaces() + assert (Catalog.identifier_to_tuple(namespace)) in catalog.list_namespaces() @pytest.mark.parametrize( @@ -1074,7 +1074,7 @@ def test_create_namespace_with_comment_and_location(catalog: SqlCatalog, namespa } catalog.create_namespace(namespace=namespace, properties=test_properties) loaded_database_list = catalog.list_namespaces() - assert Catalog.identifier_to_tuple(namespace)[:1] in loaded_database_list + assert Catalog.identifier_to_tuple(namespace) in loaded_database_list properties = catalog.load_namespace_properties(namespace) assert properties["comment"] == "this is a test description" assert properties["location"] == test_location @@ -1135,42 +1135,17 @@ def test_namespace_exists(catalog: SqlCatalog) -> None: lazy_fixture("catalog_sqlite"), ], ) -def test_list_namespaces(catalog: SqlCatalog) -> None: - namespace_list = ["db", "db.ns1", "db.ns1.ns2", "db.ns2", "db2", "db2.ns1", "db%"] +@pytest.mark.parametrize("namespace_list", [lazy_fixture("database_list"), lazy_fixture("hierarchical_namespace_list")]) +def test_list_namespaces(catalog: SqlCatalog, namespace_list: List[str]) -> None: for namespace in namespace_list: - if not catalog._namespace_exists(namespace): - catalog.create_namespace(namespace) - + catalog.create_namespace(namespace) + # Test global list ns_list = catalog.list_namespaces() - for ns in [("db",), ("db%",), ("db2",)]: - assert ns in 
ns_list - - ns_list = catalog.list_namespaces("db") - assert sorted(ns_list) == [("db", "ns1"), ("db", "ns2")] - - ns_list = catalog.list_namespaces("db.ns1") - assert sorted(ns_list) == [("db", "ns1", "ns2")] - - ns_list = catalog.list_namespaces("db.ns1.ns2") - assert len(ns_list) == 0 - - -@pytest.mark.parametrize( - "catalog", - [ - lazy_fixture("catalog_memory"), - lazy_fixture("catalog_sqlite"), - ], -) -def test_list_namespaces_fuzzy_match(catalog: SqlCatalog) -> None: - namespace_list = ["db.ns1", "db.ns1.ns2", "db.ns2", "db.ns1X.ns3", "db_.ns1.ns2", "db2.ns1.ns2"] for namespace in namespace_list: - if not catalog._namespace_exists(namespace): - catalog.create_namespace(namespace) - - assert catalog.list_namespaces("db.ns1") == [("db", "ns1", "ns2")] - - assert catalog.list_namespaces("db_.ns1") == [("db_", "ns1", "ns2")] + assert Catalog.identifier_to_tuple(namespace) in ns_list + # Test individual namespace list + assert len(one_namespace := catalog.list_namespaces(namespace)) == 1 + assert Catalog.identifier_to_tuple(namespace) == one_namespace[0] @pytest.mark.parametrize( @@ -1202,13 +1177,13 @@ def test_list_non_existing_namespaces(catalog: SqlCatalog) -> None: def test_drop_namespace(catalog: SqlCatalog, table_schema_nested: Schema, table_identifier: Identifier) -> None: namespace = Catalog.namespace_from(table_identifier) catalog.create_namespace(namespace) - assert catalog._namespace_exists(namespace) + assert namespace in catalog.list_namespaces() catalog.create_table(table_identifier, table_schema_nested) with pytest.raises(NamespaceNotEmptyError): catalog.drop_namespace(namespace) catalog.drop_table(table_identifier) catalog.drop_namespace(namespace) - assert not catalog._namespace_exists(namespace) + assert namespace not in catalog.list_namespaces() @pytest.mark.parametrize( @@ -1235,7 +1210,7 @@ def test_load_namespace_properties(catalog: SqlCatalog, namespace: str) -> None: warehouse_location = "/test/location" test_properties = { "comment": "this is a test description", - "location": f"{warehouse_location}/{namespace}", + "location": f"{warehouse_location}/{namespace}.db", "test_property1": "1", "test_property2": "2", "test_property3": "3", @@ -1286,7 +1261,7 @@ def test_update_namespace_properties(catalog: SqlCatalog, namespace: str) -> Non warehouse_location = "/test/location" test_properties = { "comment": "this is a test description", - "location": f"{warehouse_location}/{namespace}", + "location": f"{warehouse_location}/{namespace}.db", "test_property1": "1", "test_property2": "2", "test_property3": "3", @@ -1306,7 +1281,7 @@ def test_update_namespace_properties(catalog: SqlCatalog, namespace: str) -> Non "comment": "updated test description", "test_property4": "4", "test_property5": "5", - "location": f"{warehouse_location}/{namespace}", + "location": f"{warehouse_location}/{namespace}.db", } diff --git a/tests/cli/test_console.py b/tests/cli/test_console.py index a0e9552236..70e04071ad 100644 --- a/tests/cli/test_console.py +++ b/tests/cli/test_console.py @@ -271,7 +271,7 @@ def test_location(catalog: InMemoryCatalog) -> None: runner = CliRunner() result = runner.invoke(run, ["location", "default.my_table"]) assert result.exit_code == 0 - assert result.output == f"""{catalog._warehouse_location}/default/my_table\n""" + assert result.output == f"""{catalog._warehouse_location}/default.db/my_table\n""" def test_location_does_not_exists(catalog: InMemoryCatalog) -> None: @@ -476,8 +476,8 @@ def test_properties_set_table(catalog: InMemoryCatalog) -> None: runner = 
CliRunner() result = runner.invoke(run, ["properties", "set", "table", "default.my_table", "location", "s3://new_location"]) - assert result.exit_code == 0 - assert result.output == "Set location=s3://new_location on default.my_table\n" + assert result.exit_code == 1 + assert "Writing is WIP" in result.output def test_properties_set_table_does_not_exist(catalog: InMemoryCatalog) -> None: @@ -518,8 +518,8 @@ def test_properties_remove_table(catalog: InMemoryCatalog) -> None: runner = CliRunner() result = runner.invoke(run, ["properties", "remove", "table", "default.my_table", "read.split.target.size"]) - assert result.exit_code == 0 - assert result.output == "Property read.split.target.size removed from default.my_table\n" + assert result.exit_code == 1 + assert "Writing is WIP" in result.output def test_properties_remove_table_property_does_not_exists(catalog: InMemoryCatalog) -> None: @@ -700,7 +700,7 @@ def test_json_location(catalog: InMemoryCatalog) -> None: runner = CliRunner() result = runner.invoke(run, ["--output=json", "location", "default.my_table"]) assert result.exit_code == 0 - assert result.output == f'"{catalog._warehouse_location}/default/my_table"\n' + assert result.output == f'"{catalog._warehouse_location}/default.db/my_table"\n' def test_json_location_does_not_exists(catalog: InMemoryCatalog) -> None: @@ -894,8 +894,8 @@ def test_json_properties_set_table(catalog: InMemoryCatalog) -> None: result = runner.invoke( run, ["--output=json", "properties", "set", "table", "default.my_table", "location", "s3://new_location"] ) - assert result.exit_code == 0 - assert result.output == """"Set location=s3://new_location on default.my_table"\n""" + assert result.exit_code == 1 + assert "Writing is WIP" in result.output def test_json_properties_set_table_does_not_exist(catalog: InMemoryCatalog) -> None: @@ -938,8 +938,8 @@ def test_json_properties_remove_table(catalog: InMemoryCatalog) -> None: runner = CliRunner() result = runner.invoke(run, ["--output=json", "properties", "remove", "table", "default.my_table", "read.split.target.size"]) - assert result.exit_code == 0 - assert result.output == """"Property read.split.target.size removed from default.my_table"\n""" + assert result.exit_code == 1 + assert "Writing is WIP" in result.output def test_json_properties_remove_table_property_does_not_exists(catalog: InMemoryCatalog) -> None: diff --git a/tests/conftest.py b/tests/conftest.py index e036a2fa54..a0e5e74522 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -47,18 +47,12 @@ import boto3 import pytest from moto import mock_aws -from pydantic_core import to_json +from pyiceberg import schema from pyiceberg.catalog import Catalog, load_catalog from pyiceberg.catalog.noop import NoopCatalog from pyiceberg.expressions import BoundReference from pyiceberg.io import ( - ADLS_ACCOUNT_KEY, - ADLS_ACCOUNT_NAME, - ADLS_BLOB_STORAGE_AUTHORITY, - ADLS_BLOB_STORAGE_SCHEME, - ADLS_DFS_STORAGE_AUTHORITY, - ADLS_DFS_STORAGE_SCHEME, GCS_PROJECT_ID, GCS_SERVICE_HOST, GCS_TOKEN, @@ -68,12 +62,10 @@ ) from pyiceberg.io.fsspec import FsspecFileIO from pyiceberg.manifest import DataFile, FileFormat -from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Accessor, Schema from pyiceberg.serializers import ToOutputFile from pyiceberg.table import FileScanTask, Table from pyiceberg.table.metadata import TableMetadataV1, TableMetadataV2 -from pyiceberg.transforms import DayTransform, IdentityTransform from pyiceberg.types import ( BinaryType, BooleanType, @@ 
-148,7 +140,7 @@ def pytest_addoption(parser: pytest.Parser) -> None: @pytest.fixture(scope="session") def table_schema_simple() -> Schema: - return Schema( + return schema.Schema( NestedField(field_id=1, name="foo", field_type=StringType(), required=False), NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), @@ -159,7 +151,7 @@ def table_schema_simple() -> Schema: @pytest.fixture(scope="session") def table_schema_with_full_nested_fields() -> Schema: - return Schema( + return schema.Schema( NestedField( field_id=1, name="foo", @@ -188,7 +180,7 @@ def table_schema_with_full_nested_fields() -> Schema: @pytest.fixture(scope="session") def table_schema_nested() -> Schema: - return Schema( + return schema.Schema( NestedField(field_id=1, name="foo", field_type=StringType(), required=False), NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), @@ -239,7 +231,7 @@ def table_schema_nested() -> Schema: @pytest.fixture(scope="session") def table_schema_nested_with_struct_key_map() -> Schema: - return Schema( + return schema.Schema( NestedField(field_id=1, name="foo", field_type=StringType(), required=True), NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), @@ -311,7 +303,7 @@ def table_schema_nested_with_struct_key_map() -> Schema: @pytest.fixture(scope="session") def table_schema_with_all_types() -> Schema: - return Schema( + return schema.Schema( NestedField(field_id=1, name="boolean", field_type=BooleanType(), required=True), NestedField(field_id=2, name="integer", field_type=IntegerType(), required=True), NestedField(field_id=3, name="long", field_type=LongType(), required=True), @@ -357,11 +349,6 @@ def table_schema_with_all_types() -> Schema: ) -@pytest.fixture(params=["abfs", "abfss", "wasb", "wasbs"]) -def adls_scheme(request: pytest.FixtureRequest) -> str: - return request.param - - @pytest.fixture(scope="session") def pyarrow_schema_simple_without_ids() -> "pa.Schema": import pyarrow as pa @@ -933,9 +920,10 @@ def generate_snapshot( {"id": 1, "name": "x", "required": True, "type": "long"}, {"id": 2, "name": "y", "required": True, "type": "long", "doc": "comment"}, {"id": 3, "name": "z", "required": True, "type": "long"}, - {"id": 4, "name": "u", "required": True, "type": "unknown"}, - {"id": 5, "name": "ns", "required": True, "type": "timestamp_ns"}, - {"id": 6, "name": "nstz", "required": True, "type": "timestamptz_ns"}, + # TODO: Add unknown, timestamp(tz)_ns + # {"id": 4, "name": "u", "required": True, "type": "unknown"}, + # {"id": 5, "name": "ns", "required": True, "type": "timestamp_ns"}, + # {"id": 6, "name": "nstz", "required": True, "type": "timestamptz_ns"}, ], }, ], @@ -1135,22 +1123,6 @@ def example_table_metadata_v3() -> Dict[str, Any]: return EXAMPLE_TABLE_METADATA_V3 -@pytest.fixture(scope="session") -def table_location(tmp_path_factory: pytest.TempPathFactory) -> str: - from pyiceberg.io.pyarrow import PyArrowFileIO - - metadata_filename = f"{uuid.uuid4()}.metadata.json" - metadata_location = str(tmp_path_factory.getbasetemp() / "metadata" / metadata_filename) - version_hint_location = str(tmp_path_factory.getbasetemp() / "metadata" / "version-hint.text") - metadata = TableMetadataV2(**EXAMPLE_TABLE_METADATA_V2) - ToOutputFile.table_metadata(metadata, 
PyArrowFileIO().new_output(location=metadata_location), overwrite=True) - - with PyArrowFileIO().new_output(location=version_hint_location).create(overwrite=True) as s: - s.write(metadata_filename.encode("utf-8")) - - return str(tmp_path_factory.getbasetemp()) - - @pytest.fixture(scope="session") def metadata_location(tmp_path_factory: pytest.TempPathFactory) -> str: from pyiceberg.io.pyarrow import PyArrowFileIO @@ -1258,8 +1230,8 @@ def metadata_location_gz(tmp_path_factory: pytest.TempPathFactory) -> str: {"key": 15, "value": 0}, ], "lower_bounds": [ - {"key": 2, "value": b"\x01\x00\x00\x00\x00\x00\x00\x00"}, - {"key": 3, "value": b"\x01\x00\x00\x00\x00\x00\x00\x00"}, + {"key": 2, "value": b"2020-04-01 00:00"}, + {"key": 3, "value": b"2020-04-01 00:12"}, {"key": 7, "value": b"\x03\x00\x00\x00"}, {"key": 8, "value": b"\x01\x00\x00\x00"}, {"key": 10, "value": b"\xf6(\\\x8f\xc2\x05S\xc0"}, @@ -1273,8 +1245,8 @@ def metadata_location_gz(tmp_path_factory: pytest.TempPathFactory) -> str: {"key": 19, "value": b"\x00\x00\x00\x00\x00\x00\x04\xc0"}, ], "upper_bounds": [ - {"key": 2, "value": b"\x06\x00\x00\x00\x00\x00\x00\x00"}, - {"key": 3, "value": b"\x06\x00\x00\x00\x00\x00\x00\x00"}, + {"key": 2, "value": b"2020-04-30 23:5:"}, + {"key": 3, "value": b"2020-05-01 00:41"}, {"key": 7, "value": b"\t\x01\x00\x00"}, {"key": 8, "value": b"\t\x01\x00\x00"}, {"key": 10, "value": b"\xcd\xcc\xcc\xcc\xcc,_@"}, @@ -1379,8 +1351,8 @@ def metadata_location_gz(tmp_path_factory: pytest.TempPathFactory) -> str: ], "lower_bounds": [ {"key": 1, "value": b"\x01\x00\x00\x00"}, - {"key": 2, "value": b"\x01\x00\x00\x00\x00\x00\x00\x00"}, - {"key": 3, "value": b"\x01\x00\x00\x00\x00\x00\x00\x00"}, + {"key": 2, "value": b"2020-04-01 00:00"}, + {"key": 3, "value": b"2020-04-01 00:03"}, {"key": 4, "value": b"\x00\x00\x00\x00"}, {"key": 5, "value": b"\x01\x00\x00\x00"}, {"key": 6, "value": b"N"}, @@ -1399,8 +1371,8 @@ def metadata_location_gz(tmp_path_factory: pytest.TempPathFactory) -> str: ], "upper_bounds": [ {"key": 1, "value": b"\x01\x00\x00\x00"}, - {"key": 2, "value": b"\x06\x00\x00\x00\x00\x00\x00\x00"}, - {"key": 3, "value": b"\x06\x00\x00\x00\x00\x00\x00\x00"}, + {"key": 2, "value": b"2020-04-30 23:5:"}, + {"key": 3, "value": b"2020-05-01 00:1:"}, {"key": 4, "value": b"\x06\x00\x00\x00"}, {"key": 5, "value": b"c\x00\x00\x00"}, {"key": 6, "value": b"Y"}, @@ -1861,24 +1833,7 @@ def simple_map() -> MapType: @pytest.fixture(scope="session") -def test_schema() -> Schema: - return Schema( - NestedField(1, "VendorID", IntegerType(), False), NestedField(2, "tpep_pickup_datetime", TimestampType(), False) - ) - - -@pytest.fixture(scope="session") -def test_partition_spec() -> Schema: - return PartitionSpec( - PartitionField(1, 1000, IdentityTransform(), "VendorID"), - PartitionField(2, 1001, DayTransform(), "tpep_pickup_day"), - ) - - -@pytest.fixture(scope="session") -def generated_manifest_entry_file( - avro_schema_manifest_entry: Dict[str, Any], test_schema: Schema, test_partition_spec: PartitionSpec -) -> Generator[str, None, None]: +def generated_manifest_entry_file(avro_schema_manifest_entry: Dict[str, Any]) -> Generator[str, None, None]: from fastavro import parse_schema, writer parsed_schema = parse_schema(avro_schema_manifest_entry) @@ -1886,15 +1841,7 @@ def generated_manifest_entry_file( with TemporaryDirectory() as tmpdir: tmp_avro_file = tmpdir + "/manifest.avro" with open(tmp_avro_file, "wb") as out: - writer( - out, - parsed_schema, - manifest_entry_records, - metadata={ - "schema": 
test_schema.model_dump_json(), - "partition-spec": to_json(test_partition_spec.fields).decode("utf-8"), - }, - ) + writer(out, parsed_schema, manifest_entry_records) yield tmp_avro_file @@ -2127,26 +2074,6 @@ def fsspec_fileio_gcs(request: pytest.FixtureRequest) -> FsspecFileIO: return fsspec.FsspecFileIO(properties=properties) -@pytest.fixture -def adls_fsspec_fileio(request: pytest.FixtureRequest) -> Generator[FsspecFileIO, None, None]: - from azure.storage.blob import BlobServiceClient - - azurite_url = request.config.getoption("--adls.endpoint") - azurite_account_name = request.config.getoption("--adls.account-name") - azurite_account_key = request.config.getoption("--adls.account-key") - azurite_connection_string = f"DefaultEndpointsProtocol=http;AccountName={azurite_account_name};AccountKey={azurite_account_key};BlobEndpoint={azurite_url}/{azurite_account_name};" - properties = { - "adls.connection-string": azurite_connection_string, - "adls.account-name": azurite_account_name, - } - - bbs = BlobServiceClient.from_connection_string(conn_str=azurite_connection_string) - bbs.create_container("tests") - yield fsspec.FsspecFileIO(properties=properties) - bbs.delete_container("tests") - bbs.close() - - @pytest.fixture def pyarrow_fileio_gcs(request: pytest.FixtureRequest) -> "PyArrowFileIO": from pyiceberg.io.pyarrow import PyArrowFileIO @@ -2160,34 +2087,6 @@ def pyarrow_fileio_gcs(request: pytest.FixtureRequest) -> "PyArrowFileIO": return PyArrowFileIO(properties=properties) -@pytest.fixture -def pyarrow_fileio_adls(request: pytest.FixtureRequest) -> Generator[Any, None, None]: - from azure.storage.blob import BlobServiceClient - - from pyiceberg.io.pyarrow import PyArrowFileIO - - azurite_url = request.config.getoption("--adls.endpoint") - azurite_scheme, azurite_authority = azurite_url.split("://", 1) - - azurite_account_name = request.config.getoption("--adls.account-name") - azurite_account_key = request.config.getoption("--adls.account-key") - azurite_connection_string = f"DefaultEndpointsProtocol=http;AccountName={azurite_account_name};AccountKey={azurite_account_key};BlobEndpoint={azurite_url}/{azurite_account_name};" - properties = { - ADLS_ACCOUNT_NAME: azurite_account_name, - ADLS_ACCOUNT_KEY: azurite_account_key, - ADLS_BLOB_STORAGE_AUTHORITY: azurite_authority, - ADLS_DFS_STORAGE_AUTHORITY: azurite_authority, - ADLS_BLOB_STORAGE_SCHEME: azurite_scheme, - ADLS_DFS_STORAGE_SCHEME: azurite_scheme, - } - - bbs = BlobServiceClient.from_connection_string(conn_str=azurite_connection_string) - bbs.create_container("warehouse") - yield PyArrowFileIO(properties=properties) - bbs.delete_container("warehouse") - bbs.close() - - def aws_credentials() -> None: os.environ["AWS_ACCESS_KEY_ID"] = "testing" os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" @@ -2249,6 +2148,26 @@ def fixture_dynamodb(_aws_credentials: None) -> Generator[boto3.client, None, No yield boto3.client("dynamodb", region_name="us-east-1") +@pytest.fixture +def adls_fsspec_fileio(request: pytest.FixtureRequest) -> Generator[FsspecFileIO, None, None]: + from azure.storage.blob import BlobServiceClient + + azurite_url = request.config.getoption("--adls.endpoint") + azurite_account_name = request.config.getoption("--adls.account-name") + azurite_account_key = request.config.getoption("--adls.account-key") + azurite_connection_string = f"DefaultEndpointsProtocol=http;AccountName={azurite_account_name};AccountKey={azurite_account_key};BlobEndpoint={azurite_url}/{azurite_account_name};" + properties = { + 
"adls.connection-string": azurite_connection_string, + "adls.account-name": azurite_account_name, + } + + bbs = BlobServiceClient.from_connection_string(conn_str=azurite_connection_string) + bbs.create_container("tests") + yield fsspec.FsspecFileIO(properties=properties) + bbs.delete_container("tests") + bbs.close() + + @pytest.fixture(scope="session") def empty_home_dir_path(tmp_path_factory: pytest.TempPathFactory) -> str: home_path = str(tmp_path_factory.mktemp("home")) @@ -2352,7 +2271,7 @@ def clean_up(test_catalog: Catalog) -> None: database_name = database_tuple[0] if "my_iceberg_database-" in database_name: for identifier in test_catalog.list_tables(database_name): - test_catalog.drop_table(identifier) + test_catalog.purge_table(identifier) test_catalog.drop_namespace(database_name) @@ -2375,10 +2294,8 @@ def data_file(table_schema_simple: Schema, tmp_path: str) -> str: @pytest.fixture def example_task(data_file: str) -> FileScanTask: - datafile = DataFile.from_args(file_path=data_file, file_format=FileFormat.PARQUET, file_size_in_bytes=1925) - datafile.spec_id = 0 return FileScanTask( - data_file=datafile, + data_file=DataFile(file_path=data_file, file_format=FileFormat.PARQUET, file_size_in_bytes=1925), ) @@ -2503,35 +2420,29 @@ def spark() -> "SparkSession": # Remember to also update `dev/Dockerfile` spark_version = ".".join(importlib.metadata.version("pyspark").split(".")[:2]) scala_version = "2.12" - iceberg_version = "1.9.2" - hadoop_version = "3.3.4" - aws_sdk_version = "1.12.753" + iceberg_version = "1.8.0" os.environ["PYSPARK_SUBMIT_ARGS"] = ( f"--packages org.apache.iceberg:iceberg-spark-runtime-{spark_version}_{scala_version}:{iceberg_version}," - f"org.apache.hadoop:hadoop-aws:{hadoop_version}," - f"com.amazonaws:aws-java-sdk-bundle:{aws_sdk_version}," f"org.apache.iceberg:iceberg-aws-bundle:{iceberg_version} pyspark-shell" ) os.environ["AWS_REGION"] = "us-east-1" os.environ["AWS_ACCESS_KEY_ID"] = "admin" os.environ["AWS_SECRET_ACCESS_KEY"] = "password" - os.environ["SPARK_LOCAL_IP"] = "127.0.0.1" spark = ( SparkSession.builder.appName("PyIceberg integration test") .config("spark.sql.session.timeZone", "UTC") - .config("spark.sql.shuffle.partitions", "1") - .config("spark.default.parallelism", "1") .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions") .config("spark.sql.catalog.integration", "org.apache.iceberg.spark.SparkCatalog") - .config("spark.sql.catalog.integration.type", "rest") + .config("spark.sql.catalog.integration.catalog-impl", "org.apache.iceberg.rest.RESTCatalog") .config("spark.sql.catalog.integration.cache-enabled", "false") .config("spark.sql.catalog.integration.uri", "http://localhost:8181") .config("spark.sql.catalog.integration.io-impl", "org.apache.iceberg.aws.s3.S3FileIO") .config("spark.sql.catalog.integration.warehouse", "s3://warehouse/wh/") .config("spark.sql.catalog.integration.s3.endpoint", "http://localhost:9000") .config("spark.sql.catalog.integration.s3.path-style-access", "true") + .config("spark.sql.defaultCatalog", "integration") .config("spark.sql.catalog.hive", "org.apache.iceberg.spark.SparkCatalog") .config("spark.sql.catalog.hive.type", "hive") .config("spark.sql.catalog.hive.uri", "http://localhost:9083") @@ -2539,14 +2450,6 @@ def spark() -> "SparkSession": .config("spark.sql.catalog.hive.warehouse", "s3://warehouse/hive/") .config("spark.sql.catalog.hive.s3.endpoint", "http://localhost:9000") .config("spark.sql.catalog.hive.s3.path-style-access", "true") - 
.config("spark.sql.catalog.spark_catalog", "org.apache.iceberg.spark.SparkSessionCatalog") - .config("spark.sql.catalog.spark_catalog.type", "hive") - .config("spark.sql.catalog.spark_catalog.uri", "http://localhost:9083") - .config("spark.sql.catalog.spark_catalog.warehouse", "s3://warehouse/hive/") - .config("spark.hadoop.fs.s3a.endpoint", "http://localhost:9000") - .config("spark.hadoop.fs.s3a.path.style.access", "true") - .config("spark.sql.catalogImplementation", "hive") - .config("spark.sql.defaultCatalog", "integration") .config("spark.sql.execution.arrow.pyspark.enabled", "true") .getOrCreate() ) @@ -2811,28 +2714,6 @@ def arrow_table_schema_with_all_microseconds_timestamp_precisions() -> "pa.Schem ) -@pytest.fixture(scope="session") -def arrow_table_schema_with_nanoseconds_timestamp_precisions() -> "pa.Schema": - """Pyarrow Schema with all microseconds timestamp.""" - import pyarrow as pa - - return pa.schema( - [ - ("timestamp_s", pa.timestamp(unit="us")), - ("timestamptz_s", pa.timestamp(unit="us", tz="UTC")), - ("timestamp_ms", pa.timestamp(unit="us")), - ("timestamptz_ms", pa.timestamp(unit="us", tz="UTC")), - ("timestamp_us", pa.timestamp(unit="us")), - ("timestamptz_us", pa.timestamp(unit="us", tz="UTC")), - ("timestamp_ns", pa.timestamp(unit="us")), - ("timestamptz_ns", pa.timestamp(unit="ns", tz="UTC")), - ("timestamptz_us_etc_utc", pa.timestamp(unit="us", tz="UTC")), - ("timestamptz_ns_z", pa.timestamp(unit="ns", tz="UTC")), - ("timestamptz_s_0000", pa.timestamp(unit="us", tz="UTC")), - ] - ) - - @pytest.fixture(scope="session") def table_schema_with_all_microseconds_timestamp_precision() -> Schema: """Iceberg table Schema with only date, timestamp and timestamptz values.""" @@ -2890,7 +2771,7 @@ def pyarrow_schema_with_promoted_types() -> "pa.Schema": pa.field("list", pa.list_(pa.int32()), nullable=False), # can support upcasting integer to long pa.field("map", pa.map_(pa.string(), pa.int32()), nullable=False), # can support upcasting integer to long pa.field("double", pa.float32(), nullable=True), # can support upcasting float to double - pa.field("uuid", pa.binary(length=16), nullable=True), # can support upcasting fixed to uuid + pa.field("uuid", pa.binary(length=16), nullable=True), # can support upcasting float to double ) ) @@ -2906,10 +2787,7 @@ def pyarrow_table_with_promoted_types(pyarrow_schema_with_promoted_types: "pa.Sc "list": [[1, 1], [2, 2]], "map": [{"a": 1}, {"b": 2}], "double": [1.1, 9.2], - "uuid": [ - uuid.UUID("00000000-0000-0000-0000-000000000000").bytes, - uuid.UUID("11111111-1111-1111-1111-111111111111").bytes, - ], + "uuid": [b"qZx\xefNS@\x89\x9b\xf9:\xd0\xee\x9b\xf5E", b"\x97]\x87T^JDJ\x96\x97\xf4v\xe4\x03\x0c\xde"], }, schema=pyarrow_schema_with_promoted_types, ) diff --git a/tests/expressions/test_evaluator.py b/tests/expressions/test_evaluator.py index 7b15099105..e2b1f27377 100644 --- a/tests/expressions/test_evaluator.py +++ b/tests/expressions/test_evaluator.py @@ -42,7 +42,6 @@ from pyiceberg.expressions.visitors import _InclusiveMetricsEvaluator, _StrictMetricsEvaluator from pyiceberg.manifest import DataFile, FileFormat from pyiceberg.schema import Schema -from pyiceberg.typedef import Record from pyiceberg.types import ( DoubleType, FloatType, @@ -92,7 +91,7 @@ def schema_data_file() -> Schema: @pytest.fixture def data_file() -> DataFile: - return DataFile.from_args( + return DataFile( file_path="file_1.parquet", file_format=FileFormat.PARQUET, partition={}, @@ -134,7 +133,7 @@ def data_file() -> DataFile: @pytest.fixture def 
data_file_2() -> DataFile: - return DataFile.from_args( + return DataFile( file_path="file_2.parquet", file_format=FileFormat.PARQUET, partition={}, @@ -150,7 +149,7 @@ def data_file_2() -> DataFile: @pytest.fixture def data_file_3() -> DataFile: - return DataFile.from_args( + return DataFile( file_path="file_3.parquet", file_format=FileFormat.PARQUET, partition={}, @@ -166,7 +165,7 @@ def data_file_3() -> DataFile: @pytest.fixture def data_file_4() -> DataFile: - return DataFile.from_args( + return DataFile( file_path="file_4.parquet", file_format=FileFormat.PARQUET, partition={}, @@ -289,10 +288,10 @@ def test_missing_stats() -> None: NestedField(2, "no_stats", DoubleType(), required=False), ) - no_stats_file = DataFile.from_args( + no_stats_file = DataFile( file_path="file_1.parquet", file_format=FileFormat.PARQUET, - partition=Record(), + partition={}, record_count=50, value_counts=None, null_value_counts=None, @@ -320,9 +319,7 @@ def test_missing_stats() -> None: def test_zero_record_file_stats(schema_data_file: Schema) -> None: - zero_record_data_file = DataFile.from_args( - file_path="file_1.parquet", file_format=FileFormat.PARQUET, partition=Record(), record_count=0 - ) + zero_record_data_file = DataFile(file_path="file_1.parquet", file_format=FileFormat.PARQUET, partition={}, record_count=0) expressions = [ LessThan("no_stats", 5), @@ -639,7 +636,7 @@ def schema_data_file_nan() -> Schema: @pytest.fixture def data_file_nan() -> DataFile: - return DataFile.from_args( + return DataFile( file_path="file.avro", file_format=FileFormat.PARQUET, partition={}, @@ -952,7 +949,7 @@ def strict_data_file_schema() -> Schema: @pytest.fixture def strict_data_file_1() -> DataFile: - return DataFile.from_args( + return DataFile( file_path="file_1.parquet", file_format=FileFormat.PARQUET, partition={}, @@ -993,7 +990,7 @@ def strict_data_file_1() -> DataFile: @pytest.fixture def strict_data_file_2() -> DataFile: - return DataFile.from_args( + return DataFile( file_path="file_2.parquet", file_format=FileFormat.PARQUET, partition={}, @@ -1018,7 +1015,7 @@ def strict_data_file_2() -> DataFile: @pytest.fixture def strict_data_file_3() -> DataFile: - return DataFile.from_args( + return DataFile( file_path="file_3.parquet", file_format=FileFormat.PARQUET, partition={}, @@ -1150,10 +1147,10 @@ def test_strict_missing_stats(strict_data_file_schema: Schema, strict_data_file_ NestedField(2, "no_stats", DoubleType(), required=False), ) - no_stats_file = DataFile.from_args( + no_stats_file = DataFile( file_path="file_1.parquet", file_format=FileFormat.PARQUET, - partition=Record(), + partition={}, record_count=50, value_counts=None, null_value_counts=None, @@ -1181,9 +1178,7 @@ def test_strict_missing_stats(strict_data_file_schema: Schema, strict_data_file_ def test_strict_zero_record_file_stats(strict_data_file_schema: Schema) -> None: - zero_record_data_file = DataFile.from_args( - file_path="file_1.parquet", file_format=FileFormat.PARQUET, partition=Record(), record_count=0 - ) + zero_record_data_file = DataFile(file_path="file_1.parquet", file_format=FileFormat.PARQUET, partition={}, record_count=0) expressions = [ LessThan("no_stats", 5), diff --git a/tests/expressions/test_expressions.py b/tests/expressions/test_expressions.py index 828d32704a..87856a04f6 100644 --- a/tests/expressions/test_expressions.py +++ b/tests/expressions/test_expressions.py @@ -64,6 +64,8 @@ from pyiceberg.schema import Accessor, Schema from pyiceberg.typedef import Record from pyiceberg.types import ( + BinaryType, + BooleanType, 
DecimalType, DoubleType, FloatType, @@ -73,6 +75,7 @@ NestedField, StringType, StructType, + UUIDType, ) from pyiceberg.utils.singleton import Singleton @@ -165,23 +168,6 @@ def test_notnull_bind_required() -> None: assert NotNull(Reference("a")).bind(schema) == AlwaysTrue() -def test_notnull_bind_top_struct() -> None: - schema = Schema( - NestedField( - 3, - "struct_col", - required=False, - field_type=StructType( - NestedField(1, "id", IntegerType(), required=True), - NestedField(2, "cost", DecimalType(38, 18), required=False), - ), - ), - schema_id=1, - ) - bound = BoundNotNull(BoundReference(schema.find_field(3), schema.accessor_for_field(3))) - assert NotNull(Reference("struct_col")).bind(schema) == bound - - def test_isnan_inverse() -> None: assert ~IsNaN(Reference("f")) == NotNaN(Reference("f")) @@ -588,11 +574,11 @@ def test_negate(lhs: BooleanExpression, rhs: BooleanExpression) -> None: [ ( And(ExpressionA(), ExpressionB(), ExpressionA()), - And(ExpressionA(), And(ExpressionB(), ExpressionA())), + And(And(ExpressionA(), ExpressionB()), ExpressionA()), ), ( Or(ExpressionA(), ExpressionB(), ExpressionA()), - Or(ExpressionA(), Or(ExpressionB(), ExpressionA())), + Or(Or(ExpressionA(), ExpressionB()), ExpressionA()), ), (Not(Not(ExpressionA())), ExpressionA()), ], @@ -627,7 +613,22 @@ def test_invert_always() -> None: def test_accessor_base_class() -> None: """Test retrieving a value at a position of a container using an accessor""" - struct = Record(*[None] * 12) + struct = Record( + struct=StructType( + NestedField(1, "a", StringType()), + NestedField(2, "b", StringType()), + NestedField(3, "c", StringType()), + NestedField(4, "d", IntegerType()), + NestedField(5, "e", IntegerType()), + NestedField(6, "f", IntegerType()), + NestedField(7, "g", FloatType()), + NestedField(8, "h", DecimalType(8, 4)), + NestedField(9, "i", UUIDType()), + NestedField(10, "j", BooleanType()), + NestedField(11, "k", BooleanType()), + NestedField(12, "l", BinaryType()), + ) + ) uuid_value = uuid.uuid4() @@ -697,35 +698,21 @@ def test_and() -> None: null = IsNull(Reference("a")) nan = IsNaN(Reference("b")) and_ = And(null, nan) - - # Some syntactic sugar - assert and_ == null & nan - assert str(and_) == f"And(left={str(null)}, right={str(nan)})" assert repr(and_) == f"And(left={repr(null)}, right={repr(nan)})" assert and_ == eval(repr(and_)) assert and_ == pickle.loads(pickle.dumps(and_)) - with pytest.raises(ValueError, match="Expected BooleanExpression, got: abc"): - null & "abc" # type: ignore - def test_or() -> None: null = IsNull(Reference("a")) nan = IsNaN(Reference("b")) or_ = Or(null, nan) - - # Some syntactic sugar - assert or_ == null | nan - assert str(or_) == f"Or(left={str(null)}, right={str(nan)})" assert repr(or_) == f"Or(left={repr(null)}, right={repr(nan)})" assert or_ == eval(repr(or_)) assert or_ == pickle.loads(pickle.dumps(or_)) - with pytest.raises(ValueError, match="Expected BooleanExpression, got: abc"): - null | "abc" # type: ignore - def test_not() -> None: null = IsNull(Reference("a")) @@ -967,7 +954,11 @@ def test_less_than_or_equal() -> None: def test_bound_reference_eval(table_schema_simple: Schema) -> None: """Test creating a BoundReference and evaluating it on a StructProtocol""" - struct = Record("foovalue", 123, True) + struct = Record(struct=table_schema_simple.as_struct()) + + struct[0] = "foovalue" + struct[1] = 123 + struct[2] = True position1_accessor = Accessor(position=0) position2_accessor = Accessor(position=1) @@ -1169,34 +1160,6 @@ def 
test_eq_bound_expression(bound_reference_str: BoundReference[str]) -> None: ) -def test_nested_bind() -> None: - schema = Schema(NestedField(1, "foo", StructType(NestedField(2, "bar", StringType()))), schema_id=1) - bound = BoundIsNull(BoundReference(schema.find_field(2), schema.accessor_for_field(2))) - assert IsNull(Reference("foo.bar")).bind(schema) == bound - - -def test_bind_dot_name() -> None: - schema = Schema(NestedField(1, "foo.bar", StringType()), schema_id=1) - bound = BoundIsNull(BoundReference(schema.find_field(1), schema.accessor_for_field(1))) - assert IsNull(Reference("foo.bar")).bind(schema) == bound - - -def test_nested_bind_with_dot_name() -> None: - schema = Schema(NestedField(1, "foo.bar", StructType(NestedField(2, "baz", StringType()))), schema_id=1) - bound = BoundIsNull(BoundReference(schema.find_field(2), schema.accessor_for_field(2))) - assert IsNull(Reference("foo.bar.baz")).bind(schema) == bound - - -def test_bind_ambiguous_name() -> None: - with pytest.raises(ValueError) as exc_info: - Schema( - NestedField(1, "foo", StructType(NestedField(2, "bar", StringType()))), - NestedField(3, "foo.bar", StringType()), - schema_id=1, - ) - assert "Invalid schema, multiple fields for name foo.bar: 2 and 3" in str(exc_info) - - # __ __ ___ # | \/ |_ _| _ \_ _ # | |\/| | || | _/ || | diff --git a/tests/expressions/test_literals.py b/tests/expressions/test_literals.py index 4d8f5557f6..6144e32776 100644 --- a/tests/expressions/test_literals.py +++ b/tests/expressions/test_literals.py @@ -393,22 +393,6 @@ def test_string_to_boolean_literal() -> None: assert literal("FALSE").to(BooleanType()) == literal(False) -def test_string_to_float_literal() -> None: - assert literal("3.141").to(FloatType()) == literal(3.141).to(FloatType()) - - -def test_string_to_float_outside_bound() -> None: - big_lit_str = literal(str(FloatType.max + 1.0e37)) - assert big_lit_str.to(FloatType()) == FloatAboveMax() - - small_lit_str = literal(str(FloatType.min - 1.0e37)) - assert small_lit_str.to(FloatType()) == FloatBelowMin() - - -def test_string_to_double_literal() -> None: - assert literal("3.141").to(DoubleType()) == literal(3.141) - - @pytest.mark.parametrize( "val", ["unknown", "off", "on", "0", "1", "y", "yes", "n", "no", "t", "f"], @@ -760,7 +744,7 @@ def test_invalid_decimal_conversions() -> None: def test_invalid_string_conversions() -> None: assert_invalid_conversions( literal("abc"), - [FixedType(1), BinaryType()], + [FloatType(), DoubleType(), FixedType(1), BinaryType()], ) diff --git a/tests/expressions/test_parser.py b/tests/expressions/test_parser.py index 064fdb8f68..9d7a3ac094 100644 --- a/tests/expressions/test_parser.py +++ b/tests/expressions/test_parser.py @@ -14,8 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from decimal import Decimal - import pytest from pyparsing import ParseException @@ -41,7 +39,6 @@ Or, StartsWith, ) -from pyiceberg.expressions.literals import DecimalLiteral def test_always_true() -> None: @@ -219,22 +216,3 @@ def test_with_function() -> None: parser.parse("foo = 1 and lower(bar) = '2'") assert "Expected end of text, found 'and'" in str(exc_info) - - -def test_nested_fields() -> None: - assert EqualTo("foo.bar", "data") == parser.parse("foo.bar = 'data'") - assert LessThan("location.x", DecimalLiteral(Decimal(52.00))) == parser.parse("location.x < 52.00") - - -def test_quoted_column_with_dots() -> None: - with pytest.raises(ParseException) as exc_info: - parser.parse("\"foo.bar\".baz = 'data'") - - with pytest.raises(ParseException) as exc_info: - parser.parse("'foo.bar'.baz = 'data'") - - assert "Expected <= | <> | < | >= | > | == | = | !=, found '.'" in str(exc_info.value) - - -def test_quoted_column_with_spaces() -> None: - assert EqualTo("Foo Bar", "data") == parser.parse("\"Foo Bar\" = 'data'") diff --git a/tests/expressions/test_residual_evaluator.py b/tests/expressions/test_residual_evaluator.py index ba0a0da2e5..cf01821787 100644 --- a/tests/expressions/test_residual_evaluator.py +++ b/tests/expressions/test_residual_evaluator.py @@ -58,7 +58,7 @@ def test_identity_transform_residual() -> None: ) res_eval = residual_evaluator_of(spec=spec, expr=predicate, case_sensitive=True, schema=schema) - residual = res_eval.residual_for(Record(20170815)) + residual = res_eval.residual_for(Record(dateint=20170815)) # assert residual == True assert isinstance(residual, LessThan) @@ -67,7 +67,7 @@ def test_identity_transform_residual() -> None: assert residual.literal.value == 12 assert type(residual) is LessThan - residual = res_eval.residual_for(Record(20170801)) + residual = res_eval.residual_for(Record(dateint=20170801)) # assert isinstance(residual, UnboundPredicate) from pyiceberg.expressions import LiteralPredicate @@ -79,11 +79,11 @@ def test_identity_transform_residual() -> None: assert residual.literal.value == 11 # type :ignore # assert type(residual) == BoundGreaterThan - residual = res_eval.residual_for(Record(20170812)) + residual = res_eval.residual_for(Record(dateint=20170812)) assert residual == AlwaysTrue() - residual = res_eval.residual_for(Record(20170817)) + residual = res_eval.residual_for(Record(dateint=20170817)) assert residual == AlwaysFalse() @@ -103,7 +103,7 @@ def test_case_insensitive_identity_transform_residuals() -> None: res_eval = residual_evaluator_of(spec=spec, expr=predicate, case_sensitive=True, schema=schema) with pytest.raises(ValueError) as e: - res_eval.residual_for(Record(20170815)) + res_eval.residual_for(Record(dateint=20170815)) assert "Could not find field with name DATEINT, case_sensitive=True" in str(e.value) @@ -142,7 +142,7 @@ def test_in() -> None: res_eval = residual_evaluator_of(spec=spec, expr=predicate, case_sensitive=True, schema=schema) - residual = res_eval.residual_for(Record(20170815)) + residual = res_eval.residual_for(Record(dateint=20170815)) assert residual == AlwaysTrue() @@ -178,10 +178,10 @@ def test_not_in() -> None: res_eval = residual_evaluator_of(spec=spec, expr=predicate, case_sensitive=True, schema=schema) - residual = res_eval.residual_for(Record(20180815)) + residual = res_eval.residual_for(Record(dateint=20180815)) assert residual == AlwaysTrue() - residual = res_eval.residual_for(Record(20170815)) + residual = res_eval.residual_for(Record(dateint=20170815)) assert residual == AlwaysFalse() @@ -194,10 
+194,10 @@ def test_is_nan() -> None: res_eval = residual_evaluator_of(spec=spec, expr=predicate, case_sensitive=True, schema=schema) - residual = res_eval.residual_for(Record(float("nan"))) + residual = res_eval.residual_for(Record(double=float("nan"))) assert residual == AlwaysTrue() - residual = res_eval.residual_for(Record(2)) + residual = res_eval.residual_for(Record(double=2)) assert residual == AlwaysFalse() @@ -210,10 +210,10 @@ def test_is_not_nan() -> None: res_eval = residual_evaluator_of(spec=spec, expr=predicate, case_sensitive=True, schema=schema) - residual = res_eval.residual_for(Record(None)) + residual = res_eval.residual_for(Record(double=None)) assert residual == AlwaysFalse() - residual = res_eval.residual_for(Record(2)) + residual = res_eval.residual_for(Record(double=2)) assert residual == AlwaysTrue() spec = PartitionSpec(PartitionField(51, 1051, IdentityTransform(), "float_part")) @@ -222,10 +222,10 @@ def test_is_not_nan() -> None: res_eval = residual_evaluator_of(spec=spec, expr=predicate, case_sensitive=True, schema=schema) - residual = res_eval.residual_for(Record(None)) + residual = res_eval.residual_for(Record(double=None)) assert residual == AlwaysFalse() - residual = res_eval.residual_for(Record(2)) + residual = res_eval.residual_for(Record(double=2)) assert residual == AlwaysTrue() diff --git a/tests/expressions/test_visitors.py b/tests/expressions/test_visitors.py index d0b6ab5ab4..94bfcf076c 100644 --- a/tests/expressions/test_visitors.py +++ b/tests/expressions/test_visitors.py @@ -72,7 +72,6 @@ expression_to_plain_format, rewrite_not, rewrite_to_dnf, - translate_column_names, visit, visit_bound_predicate, ) @@ -80,7 +79,6 @@ from pyiceberg.schema import Accessor, Schema from pyiceberg.typedef import Record from pyiceberg.types import ( - BooleanType, DoubleType, FloatType, IcebergType, @@ -232,13 +230,13 @@ def test_boolean_expression_visitor() -> None: "NOT", "OR", "EQUALTO", - "NOTEQUALTO", "OR", + "NOTEQUALTO", "OR", "EQUALTO", "NOT", - "NOTEQUALTO", "AND", + "NOTEQUALTO", "AND", ] @@ -337,14 +335,14 @@ def test_always_false_or_always_true_expression_binding(table_schema_simple: Sch ), ), And( - BoundIn( - BoundReference( - field=NestedField(field_id=1, name="foo", field_type=StringType(), required=False), - accessor=Accessor(position=0, inner=None), - ), - {literal("bar"), literal("baz")}, - ), And( + BoundIn( + BoundReference( + field=NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + accessor=Accessor(position=0, inner=None), + ), + {literal("bar"), literal("baz")}, + ), BoundEqualTo[int]( BoundReference( field=NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), @@ -352,13 +350,13 @@ def test_always_false_or_always_true_expression_binding(table_schema_simple: Sch ), literal(1), ), - BoundEqualTo( - BoundReference( - field=NestedField(field_id=1, name="foo", field_type=StringType(), required=False), - accessor=Accessor(position=0, inner=None), - ), - literal("baz"), + ), + BoundEqualTo( + BoundReference( + field=NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + accessor=Accessor(position=0, inner=None), ), + literal("baz"), ), ), ), @@ -410,29 +408,29 @@ def test_and_expression_binding( ), ), Or( - BoundIn( - BoundReference( - field=NestedField(field_id=1, name="foo", field_type=StringType(), required=False), - accessor=Accessor(position=0, inner=None), - ), - {literal("bar"), literal("baz")}, - ), Or( BoundIn( BoundReference( field=NestedField(field_id=1, 
name="foo", field_type=StringType(), required=False), accessor=Accessor(position=0, inner=None), ), - {literal("bar")}, + {literal("bar"), literal("baz")}, ), BoundIn( BoundReference( field=NestedField(field_id=1, name="foo", field_type=StringType(), required=False), accessor=Accessor(position=0, inner=None), ), - {literal("baz")}, + {literal("bar")}, ), ), + BoundIn( + BoundReference( + field=NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + accessor=Accessor(position=0, inner=None), + ), + {literal("baz")}, + ), ), ), ( @@ -824,7 +822,7 @@ def _to_byte_buffer(field_type: IcebergType, val: Any) -> bytes: def _to_manifest_file(*partitions: PartitionFieldSummary) -> ManifestFile: """Helper to create a ManifestFile""" - return ManifestFile.from_args(manifest_path="", manifest_length=0, partition_spec_id=0, partitions=partitions) + return ManifestFile(manifest_path="", manifest_length=0, partition_spec_id=0, partitions=partitions) INT_MIN_VALUE = 30 @@ -865,81 +863,81 @@ def manifest_no_stats() -> ManifestFile: def manifest() -> ManifestFile: return _to_manifest_file( # id - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=False, contains_nan=None, lower_bound=INT_MIN, upper_bound=INT_MAX, ), # all_nulls_missing_nan - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=True, contains_nan=None, lower_bound=None, upper_bound=None, ), # some_nulls - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=True, contains_nan=None, lower_bound=STRING_MIN, upper_bound=STRING_MAX, ), # no_nulls - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=False, contains_nan=None, lower_bound=STRING_MIN, upper_bound=STRING_MAX, ), # float - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=True, contains_nan=None, lower_bound=_to_byte_buffer(FloatType(), 0.0), upper_bound=_to_byte_buffer(FloatType(), 20.0), ), # all_nulls_double - PartitionFieldSummary.from_args(contains_null=True, contains_nan=None, lower_bound=None, upper_bound=None), + PartitionFieldSummary(contains_null=True, contains_nan=None, lower_bound=None, upper_bound=None), # all_nulls_no_nans - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=True, contains_nan=False, lower_bound=None, upper_bound=None, ), # all_nans - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=False, contains_nan=True, lower_bound=None, upper_bound=None, ), # both_nan_and_null - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=True, contains_nan=True, lower_bound=None, upper_bound=None, ), # no_nan_or_null - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=False, contains_nan=False, lower_bound=_to_byte_buffer(FloatType(), 0.0), upper_bound=_to_byte_buffer(FloatType(), 20.0), ), # all_nulls_missing_nan_float - PartitionFieldSummary.from_args(contains_null=True, contains_nan=None, lower_bound=None, upper_bound=None), + PartitionFieldSummary(contains_null=True, contains_nan=None, lower_bound=None, upper_bound=None), # all_same_value_or_null - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=True, contains_nan=None, lower_bound=STRING_MIN, upper_bound=STRING_MIN, ), # no_nulls_same_value_a - PartitionFieldSummary.from_args( + PartitionFieldSummary( contains_null=False, contains_nan=None, lower_bound=STRING_MIN, @@ -949,95 +947,95 @@ def manifest() -> ManifestFile: def test_all_nulls(schema: Schema, manifest: ManifestFile) -> None: - 
assert not _ManifestEvalVisitor(schema, NotNull(Reference("all_nulls_missing_nan")), case_sensitive=True).eval(manifest), ( - "Should skip: all nulls column with non-floating type contains all null" - ) + assert not _ManifestEvalVisitor(schema, NotNull(Reference("all_nulls_missing_nan")), case_sensitive=True).eval( + manifest + ), "Should skip: all nulls column with non-floating type contains all null" - assert _ManifestEvalVisitor(schema, NotNull(Reference("all_nulls_missing_nan_float")), case_sensitive=True).eval(manifest), ( - "Should read: no NaN information may indicate presence of NaN value" - ) + assert _ManifestEvalVisitor(schema, NotNull(Reference("all_nulls_missing_nan_float")), case_sensitive=True).eval( + manifest + ), "Should read: no NaN information may indicate presence of NaN value" - assert _ManifestEvalVisitor(schema, NotNull(Reference("some_nulls")), case_sensitive=True).eval(manifest), ( - "Should read: column with some nulls contains a non-null value" - ) + assert _ManifestEvalVisitor(schema, NotNull(Reference("some_nulls")), case_sensitive=True).eval( + manifest + ), "Should read: column with some nulls contains a non-null value" - assert _ManifestEvalVisitor(schema, NotNull(Reference("no_nulls")), case_sensitive=True).eval(manifest), ( - "Should read: non-null column contains a non-null value" - ) + assert _ManifestEvalVisitor(schema, NotNull(Reference("no_nulls")), case_sensitive=True).eval( + manifest + ), "Should read: non-null column contains a non-null value" def test_no_nulls(schema: Schema, manifest: ManifestFile) -> None: - assert _ManifestEvalVisitor(schema, IsNull(Reference("all_nulls_missing_nan")), case_sensitive=True).eval(manifest), ( - "Should read: at least one null value in all null column" - ) + assert _ManifestEvalVisitor(schema, IsNull(Reference("all_nulls_missing_nan")), case_sensitive=True).eval( + manifest + ), "Should read: at least one null value in all null column" - assert _ManifestEvalVisitor(schema, IsNull(Reference("some_nulls")), case_sensitive=True).eval(manifest), ( - "Should read: column with some nulls contains a null value" - ) + assert _ManifestEvalVisitor(schema, IsNull(Reference("some_nulls")), case_sensitive=True).eval( + manifest + ), "Should read: column with some nulls contains a null value" - assert not _ManifestEvalVisitor(schema, IsNull(Reference("no_nulls")), case_sensitive=True).eval(manifest), ( - "Should skip: non-null column contains no null values" - ) + assert not _ManifestEvalVisitor(schema, IsNull(Reference("no_nulls")), case_sensitive=True).eval( + manifest + ), "Should skip: non-null column contains no null values" - assert _ManifestEvalVisitor(schema, IsNull(Reference("both_nan_and_null")), case_sensitive=True).eval(manifest), ( - "Should read: both_nan_and_null column contains no null values" - ) + assert _ManifestEvalVisitor(schema, IsNull(Reference("both_nan_and_null")), case_sensitive=True).eval( + manifest + ), "Should read: both_nan_and_null column contains no null values" def test_is_nan(schema: Schema, manifest: ManifestFile) -> None: - assert _ManifestEvalVisitor(schema, IsNaN(Reference("float")), case_sensitive=True).eval(manifest), ( - "Should read: no information on if there are nan value in float column" - ) + assert _ManifestEvalVisitor(schema, IsNaN(Reference("float")), case_sensitive=True).eval( + manifest + ), "Should read: no information on if there are nan value in float column" - assert _ManifestEvalVisitor(schema, IsNaN(Reference("all_nulls_double")), case_sensitive=True).eval(manifest), ( 
- "Should read: no NaN information may indicate presence of NaN value" - ) + assert _ManifestEvalVisitor(schema, IsNaN(Reference("all_nulls_double")), case_sensitive=True).eval( + manifest + ), "Should read: no NaN information may indicate presence of NaN value" - assert _ManifestEvalVisitor(schema, IsNaN(Reference("all_nulls_missing_nan_float")), case_sensitive=True).eval(manifest), ( - "Should read: no NaN information may indicate presence of NaN value" - ) + assert _ManifestEvalVisitor(schema, IsNaN(Reference("all_nulls_missing_nan_float")), case_sensitive=True).eval( + manifest + ), "Should read: no NaN information may indicate presence of NaN value" - assert not _ManifestEvalVisitor(schema, IsNaN(Reference("all_nulls_no_nans")), case_sensitive=True).eval(manifest), ( - "Should skip: no nan column doesn't contain nan value" - ) + assert not _ManifestEvalVisitor(schema, IsNaN(Reference("all_nulls_no_nans")), case_sensitive=True).eval( + manifest + ), "Should skip: no nan column doesn't contain nan value" - assert _ManifestEvalVisitor(schema, IsNaN(Reference("all_nans")), case_sensitive=True).eval(manifest), ( - "Should read: all_nans column contains nan value" - ) + assert _ManifestEvalVisitor(schema, IsNaN(Reference("all_nans")), case_sensitive=True).eval( + manifest + ), "Should read: all_nans column contains nan value" - assert _ManifestEvalVisitor(schema, IsNaN(Reference("both_nan_and_null")), case_sensitive=True).eval(manifest), ( - "Should read: both_nan_and_null column contains nan value" - ) + assert _ManifestEvalVisitor(schema, IsNaN(Reference("both_nan_and_null")), case_sensitive=True).eval( + manifest + ), "Should read: both_nan_and_null column contains nan value" - assert not _ManifestEvalVisitor(schema, IsNaN(Reference("no_nan_or_null")), case_sensitive=True).eval(manifest), ( - "Should skip: no_nan_or_null column doesn't contain nan value" - ) + assert not _ManifestEvalVisitor(schema, IsNaN(Reference("no_nan_or_null")), case_sensitive=True).eval( + manifest + ), "Should skip: no_nan_or_null column doesn't contain nan value" def test_not_nan(schema: Schema, manifest: ManifestFile) -> None: - assert _ManifestEvalVisitor(schema, NotNaN(Reference("float")), case_sensitive=True).eval(manifest), ( - "Should read: no information on if there are nan value in float column" - ) + assert _ManifestEvalVisitor(schema, NotNaN(Reference("float")), case_sensitive=True).eval( + manifest + ), "Should read: no information on if there are nan value in float column" - assert _ManifestEvalVisitor(schema, NotNaN(Reference("all_nulls_double")), case_sensitive=True).eval(manifest), ( - "Should read: all null column contains non nan value" - ) + assert _ManifestEvalVisitor(schema, NotNaN(Reference("all_nulls_double")), case_sensitive=True).eval( + manifest + ), "Should read: all null column contains non nan value" - assert _ManifestEvalVisitor(schema, NotNaN(Reference("all_nulls_no_nans")), case_sensitive=True).eval(manifest), ( - "Should read: no_nans column contains non nan value" - ) + assert _ManifestEvalVisitor(schema, NotNaN(Reference("all_nulls_no_nans")), case_sensitive=True).eval( + manifest + ), "Should read: no_nans column contains non nan value" - assert not _ManifestEvalVisitor(schema, NotNaN(Reference("all_nans")), case_sensitive=True).eval(manifest), ( - "Should skip: all nans column doesn't contain non nan value" - ) + assert not _ManifestEvalVisitor(schema, NotNaN(Reference("all_nans")), case_sensitive=True).eval( + manifest + ), "Should skip: all nans column doesn't contain non 
nan value" - assert _ManifestEvalVisitor(schema, NotNaN(Reference("both_nan_and_null")), case_sensitive=True).eval(manifest), ( - "Should read: both_nan_and_null nans column contains non nan value" - ) + assert _ManifestEvalVisitor(schema, NotNaN(Reference("both_nan_and_null")), case_sensitive=True).eval( + manifest + ), "Should read: both_nan_and_null nans column contains non nan value" - assert _ManifestEvalVisitor(schema, NotNaN(Reference("no_nan_or_null")), case_sensitive=True).eval(manifest), ( - "Should read: no_nan_or_null column contains non nan value" - ) + assert _ManifestEvalVisitor(schema, NotNaN(Reference("no_nan_or_null")), case_sensitive=True).eval( + manifest + ), "Should read: no_nan_or_null column contains non nan value" def test_missing_stats(schema: Schema, manifest_no_stats: ManifestFile) -> None: @@ -1055,15 +1053,15 @@ def test_missing_stats(schema: Schema, manifest_no_stats: ManifestFile) -> None: ] for expr in expressions: - assert _ManifestEvalVisitor(schema, expr, case_sensitive=True).eval(manifest_no_stats), ( - f"Should read when missing stats for expr: {expr}" - ) + assert _ManifestEvalVisitor(schema, expr, case_sensitive=True).eval( + manifest_no_stats + ), f"Should read when missing stats for expr: {expr}" def test_not(schema: Schema, manifest: ManifestFile) -> None: - assert _ManifestEvalVisitor(schema, Not(LessThan(Reference("id"), INT_MIN_VALUE - 25)), case_sensitive=True).eval(manifest), ( - "Should read: not(false)" - ) + assert _ManifestEvalVisitor(schema, Not(LessThan(Reference("id"), INT_MIN_VALUE - 25)), case_sensitive=True).eval( + manifest + ), "Should read: not(false)" assert not _ManifestEvalVisitor(schema, Not(GreaterThan(Reference("id"), INT_MIN_VALUE - 25)), case_sensitive=True).eval( manifest @@ -1120,21 +1118,21 @@ def test_or(schema: Schema, manifest: ManifestFile) -> None: def test_integer_lt(schema: Schema, manifest: ManifestFile) -> None: - assert not _ManifestEvalVisitor(schema, LessThan(Reference("id"), INT_MIN_VALUE - 25), case_sensitive=True).eval(manifest), ( - "Should not read: id range below lower bound (5 < 30)" - ) + assert not _ManifestEvalVisitor(schema, LessThan(Reference("id"), INT_MIN_VALUE - 25), case_sensitive=True).eval( + manifest + ), "Should not read: id range below lower bound (5 < 30)" - assert not _ManifestEvalVisitor(schema, LessThan(Reference("id"), INT_MIN_VALUE), case_sensitive=True).eval(manifest), ( - "Should not read: id range below lower bound (30 is not < 30)" - ) + assert not _ManifestEvalVisitor(schema, LessThan(Reference("id"), INT_MIN_VALUE), case_sensitive=True).eval( + manifest + ), "Should not read: id range below lower bound (30 is not < 30)" - assert _ManifestEvalVisitor(schema, LessThan(Reference("id"), INT_MIN_VALUE + 1), case_sensitive=True).eval(manifest), ( - "Should read: one possible id" - ) + assert _ManifestEvalVisitor(schema, LessThan(Reference("id"), INT_MIN_VALUE + 1), case_sensitive=True).eval( + manifest + ), "Should read: one possible id" - assert _ManifestEvalVisitor(schema, LessThan(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval(manifest), ( - "Should read: may possible ids" - ) + assert _ManifestEvalVisitor(schema, LessThan(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval( + manifest + ), "Should read: may possible ids" def test_integer_lt_eq(schema: Schema, manifest: ManifestFile) -> None: @@ -1146,13 +1144,13 @@ def test_integer_lt_eq(schema: Schema, manifest: ManifestFile) -> None: manifest ), "Should not read: id range below lower bound (29 < 30)" - 
assert _ManifestEvalVisitor(schema, LessThanOrEqual(Reference("id"), INT_MIN_VALUE), case_sensitive=True).eval(manifest), ( - "Should read: one possible id" - ) + assert _ManifestEvalVisitor(schema, LessThanOrEqual(Reference("id"), INT_MIN_VALUE), case_sensitive=True).eval( + manifest + ), "Should read: one possible id" - assert _ManifestEvalVisitor(schema, LessThanOrEqual(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval(manifest), ( - "Should read: many possible ids" - ) + assert _ManifestEvalVisitor(schema, LessThanOrEqual(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval( + manifest + ), "Should read: many possible ids" def test_integer_gt(schema: Schema, manifest: ManifestFile) -> None: @@ -1160,17 +1158,17 @@ def test_integer_gt(schema: Schema, manifest: ManifestFile) -> None: manifest ), "Should not read: id range above upper bound (85 < 79)" - assert not _ManifestEvalVisitor(schema, GreaterThan(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval(manifest), ( - "Should not read: id range above upper bound (79 is not > 79)" - ) + assert not _ManifestEvalVisitor(schema, GreaterThan(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval( + manifest + ), "Should not read: id range above upper bound (79 is not > 79)" - assert _ManifestEvalVisitor(schema, GreaterThan(Reference("id"), INT_MAX_VALUE - 1), case_sensitive=True).eval(manifest), ( - "Should read: one possible id" - ) + assert _ManifestEvalVisitor(schema, GreaterThan(Reference("id"), INT_MAX_VALUE - 1), case_sensitive=True).eval( + manifest + ), "Should read: one possible id" - assert _ManifestEvalVisitor(schema, GreaterThan(Reference("id"), INT_MAX_VALUE - 4), case_sensitive=True).eval(manifest), ( - "Should read: may possible ids" - ) + assert _ManifestEvalVisitor(schema, GreaterThan(Reference("id"), INT_MAX_VALUE - 4), case_sensitive=True).eval( + manifest + ), "Should read: may possible ids" def test_integer_gt_eq(schema: Schema, manifest: ManifestFile) -> None: @@ -1182,133 +1180,133 @@ def test_integer_gt_eq(schema: Schema, manifest: ManifestFile) -> None: manifest ), "Should not read: id range above upper bound (80 > 79)" - assert _ManifestEvalVisitor(schema, GreaterThanOrEqual(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval(manifest), ( - "Should read: one possible id" - ) + assert _ManifestEvalVisitor(schema, GreaterThanOrEqual(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval( + manifest + ), "Should read: one possible id" - assert _ManifestEvalVisitor(schema, GreaterThanOrEqual(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval(manifest), ( - "Should read: may possible ids" - ) + assert _ManifestEvalVisitor(schema, GreaterThanOrEqual(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval( + manifest + ), "Should read: may possible ids" def test_integer_eq(schema: Schema, manifest: ManifestFile) -> None: - assert not _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MIN_VALUE - 25), case_sensitive=True).eval(manifest), ( - "Should not read: id below lower bound" - ) + assert not _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MIN_VALUE - 25), case_sensitive=True).eval( + manifest + ), "Should not read: id below lower bound" - assert not _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MIN_VALUE - 1), case_sensitive=True).eval(manifest), ( - "Should not read: id below lower bound" - ) + assert not _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MIN_VALUE - 1), case_sensitive=True).eval( + manifest + ), "Should not read: id 
below lower bound" - assert _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MIN_VALUE), case_sensitive=True).eval(manifest), ( - "Should read: id equal to lower bound" - ) + assert _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MIN_VALUE), case_sensitive=True).eval( + manifest + ), "Should read: id equal to lower bound" - assert _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MAX_VALUE - 4), case_sensitive=True).eval(manifest), ( - "Should read: id between lower and upper bounds" - ) + assert _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MAX_VALUE - 4), case_sensitive=True).eval( + manifest + ), "Should read: id between lower and upper bounds" - assert _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval(manifest), ( - "Should read: id equal to upper bound" - ) + assert _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval( + manifest + ), "Should read: id equal to upper bound" - assert not _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MAX_VALUE + 1), case_sensitive=True).eval(manifest), ( - "Should not read: id above upper bound" - ) + assert not _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MAX_VALUE + 1), case_sensitive=True).eval( + manifest + ), "Should not read: id above upper bound" - assert not _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MAX_VALUE + 6), case_sensitive=True).eval(manifest), ( - "Should not read: id above upper bound" - ) + assert not _ManifestEvalVisitor(schema, EqualTo(Reference("id"), INT_MAX_VALUE + 6), case_sensitive=True).eval( + manifest + ), "Should not read: id above upper bound" def test_integer_not_eq(schema: Schema, manifest: ManifestFile) -> None: - assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MIN_VALUE - 25), case_sensitive=True).eval(manifest), ( - "Should read: id below lower bound" - ) + assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MIN_VALUE - 25), case_sensitive=True).eval( + manifest + ), "Should read: id below lower bound" - assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MIN_VALUE - 1), case_sensitive=True).eval(manifest), ( - "Should read: id below lower bound" - ) + assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MIN_VALUE - 1), case_sensitive=True).eval( + manifest + ), "Should read: id below lower bound" - assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MIN_VALUE), case_sensitive=True).eval(manifest), ( - "Should read: id equal to lower bound" - ) + assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MIN_VALUE), case_sensitive=True).eval( + manifest + ), "Should read: id equal to lower bound" - assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MAX_VALUE - 4), case_sensitive=True).eval(manifest), ( - "Should read: id between lower and upper bounds" - ) + assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MAX_VALUE - 4), case_sensitive=True).eval( + manifest + ), "Should read: id between lower and upper bounds" - assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval(manifest), ( - "Should read: id equal to upper bound" - ) + assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MAX_VALUE), case_sensitive=True).eval( + manifest + ), "Should read: id equal to upper bound" - assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MAX_VALUE + 1), 
case_sensitive=True).eval(manifest), ( - "Should read: id above upper bound" - ) + assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MAX_VALUE + 1), case_sensitive=True).eval( + manifest + ), "Should read: id above upper bound" - assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MAX_VALUE + 6), case_sensitive=True).eval(manifest), ( - "Should read: id above upper bound" - ) + assert _ManifestEvalVisitor(schema, NotEqualTo(Reference("id"), INT_MAX_VALUE + 6), case_sensitive=True).eval( + manifest + ), "Should read: id above upper bound" def test_integer_not_eq_rewritten(schema: Schema, manifest: ManifestFile) -> None: - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MIN_VALUE - 25)), case_sensitive=True).eval(manifest), ( - "Should read: id below lower bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MIN_VALUE - 25)), case_sensitive=True).eval( + manifest + ), "Should read: id below lower bound" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MIN_VALUE - 1)), case_sensitive=True).eval(manifest), ( - "Should read: id below lower bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MIN_VALUE - 1)), case_sensitive=True).eval( + manifest + ), "Should read: id below lower bound" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MIN_VALUE)), case_sensitive=True).eval(manifest), ( - "Should read: id equal to lower bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MIN_VALUE)), case_sensitive=True).eval( + manifest + ), "Should read: id equal to lower bound" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MAX_VALUE - 4)), case_sensitive=True).eval(manifest), ( - "Should read: id between lower and upper bounds" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MAX_VALUE - 4)), case_sensitive=True).eval( + manifest + ), "Should read: id between lower and upper bounds" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MAX_VALUE)), case_sensitive=True).eval(manifest), ( - "Should read: id equal to upper bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MAX_VALUE)), case_sensitive=True).eval( + manifest + ), "Should read: id equal to upper bound" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MAX_VALUE + 1)), case_sensitive=True).eval(manifest), ( - "Should read: id above upper bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MAX_VALUE + 1)), case_sensitive=True).eval( + manifest + ), "Should read: id above upper bound" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MAX_VALUE + 6)), case_sensitive=True).eval(manifest), ( - "Should read: id above upper bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("id"), INT_MAX_VALUE + 6)), case_sensitive=True).eval( + manifest + ), "Should read: id above upper bound" def test_integer_not_eq_rewritten_case_insensitive(schema: Schema, manifest: ManifestFile) -> None: - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MIN_VALUE - 25)), case_sensitive=False).eval(manifest), ( - "Should read: id below lower bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MIN_VALUE - 25)), case_sensitive=False).eval( + manifest + ), "Should read: id below lower bound" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), 
INT_MIN_VALUE - 1)), case_sensitive=False).eval(manifest), ( - "Should read: id below lower bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MIN_VALUE - 1)), case_sensitive=False).eval( + manifest + ), "Should read: id below lower bound" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MIN_VALUE)), case_sensitive=False).eval(manifest), ( - "Should read: id equal to lower bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MIN_VALUE)), case_sensitive=False).eval( + manifest + ), "Should read: id equal to lower bound" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MAX_VALUE - 4)), case_sensitive=False).eval(manifest), ( - "Should read: id between lower and upper bounds" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MAX_VALUE - 4)), case_sensitive=False).eval( + manifest + ), "Should read: id between lower and upper bounds" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MAX_VALUE)), case_sensitive=False).eval(manifest), ( - "Should read: id equal to upper bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MAX_VALUE)), case_sensitive=False).eval( + manifest + ), "Should read: id equal to upper bound" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MAX_VALUE + 1)), case_sensitive=False).eval(manifest), ( - "Should read: id above upper bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MAX_VALUE + 1)), case_sensitive=False).eval( + manifest + ), "Should read: id above upper bound" - assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MAX_VALUE + 6)), case_sensitive=False).eval(manifest), ( - "Should read: id above upper bound" - ) + assert _ManifestEvalVisitor(schema, Not(EqualTo(Reference("ID"), INT_MAX_VALUE + 6)), case_sensitive=False).eval( + manifest + ), "Should read: id above upper bound" def test_integer_in(schema: Schema, manifest: ManifestFile) -> None: @@ -1344,13 +1342,13 @@ def test_integer_in(schema: Schema, manifest: ManifestFile) -> None: manifest ), "Should skip: in on all nulls column" - assert _ManifestEvalVisitor(schema, In(Reference("some_nulls"), ("abc", "def")), case_sensitive=True).eval(manifest), ( - "Should read: in on some nulls column" - ) + assert _ManifestEvalVisitor(schema, In(Reference("some_nulls"), ("abc", "def")), case_sensitive=True).eval( + manifest + ), "Should read: in on some nulls column" - assert _ManifestEvalVisitor(schema, In(Reference("no_nulls"), ("abc", "def")), case_sensitive=True).eval(manifest), ( - "Should read: in on no nulls column" - ) + assert _ManifestEvalVisitor(schema, In(Reference("no_nulls"), ("abc", "def")), case_sensitive=True).eval( + manifest + ), "Should read: in on no nulls column" def test_integer_not_in(schema: Schema, manifest: ManifestFile) -> None: @@ -1386,73 +1384,73 @@ def test_integer_not_in(schema: Schema, manifest: ManifestFile) -> None: manifest ), "Should read: notIn on no nulls column" - assert _ManifestEvalVisitor(schema, NotIn(Reference("some_nulls"), ("abc", "def")), case_sensitive=True).eval(manifest), ( - "Should read: in on some nulls column" - ) + assert _ManifestEvalVisitor(schema, NotIn(Reference("some_nulls"), ("abc", "def")), case_sensitive=True).eval( + manifest + ), "Should read: in on some nulls column" - assert _ManifestEvalVisitor(schema, NotIn(Reference("no_nulls"), ("abc", "def")), case_sensitive=True).eval(manifest), ( - "Should read: in 
on no nulls column" - ) + assert _ManifestEvalVisitor(schema, NotIn(Reference("no_nulls"), ("abc", "def")), case_sensitive=True).eval( + manifest + ), "Should read: in on no nulls column" def test_string_starts_with(schema: Schema, manifest: ManifestFile) -> None: - assert _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "a"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "a"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "aa"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "aa"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "dddd"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "dddd"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "z"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "z"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert _ManifestEvalVisitor(schema, StartsWith(Reference("no_nulls"), "a"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, StartsWith(Reference("no_nulls"), "a"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert not _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "zzzz"), case_sensitive=False).eval(manifest), ( - "Should skip: range doesn't match" - ) + assert not _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "zzzz"), case_sensitive=False).eval( + manifest + ), "Should skip: range doesn't match" - assert not _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "1"), case_sensitive=False).eval(manifest), ( - "Should skip: range doesn't match" - ) + assert not _ManifestEvalVisitor(schema, StartsWith(Reference("some_nulls"), "1"), case_sensitive=False).eval( + manifest + ), "Should skip: range doesn't match" def test_string_not_starts_with(schema: Schema, manifest: ManifestFile) -> None: - assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "a"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "a"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "aa"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "aa"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "dddd"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "dddd"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert _ManifestEvalVisitor(schema, 
NotStartsWith(Reference("some_nulls"), "z"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "z"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("no_nulls"), "a"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("no_nulls"), "a"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "zzzz"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "zzzz"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" - assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "1"), case_sensitive=False).eval(manifest), ( - "Should read: range matches" - ) + assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("some_nulls"), "1"), case_sensitive=False).eval( + manifest + ), "Should read: range matches" assert _ManifestEvalVisitor(schema, NotStartsWith(Reference("all_same_value_or_null"), "a"), case_sensitive=False).eval( manifest @@ -1609,7 +1607,7 @@ def test_dnf_to_dask(table_schema_simple: Schema) -> None: def test_expression_evaluator_null() -> None: - struct = Record(None) + struct = Record(a=None) schema = Schema(NestedField(1, "a", IntegerType(), required=False), schema_id=1) assert expression_evaluator(schema, In("a", {1, 2, 3}), case_sensitive=True)(struct) is False assert expression_evaluator(schema, NotIn("a", {1, 2, 3}), case_sensitive=True)(struct) is True @@ -1625,267 +1623,3 @@ def test_expression_evaluator_null() -> None: assert expression_evaluator(schema, LessThan("a", 1), case_sensitive=True)(struct) is False assert expression_evaluator(schema, StartsWith("a", 1), case_sensitive=True)(struct) is False assert expression_evaluator(schema, NotStartsWith("a", 1), case_sensitive=True)(struct) is True - - -def test_translate_column_names_simple_case(table_schema_simple: Schema) -> None: - """Test translate_column_names with matching column names.""" - # Create a bound expression using the original schema - unbound_expr = EqualTo("foo", "test_value") - bound_expr = visit(unbound_expr, visitor=BindVisitor(schema=table_schema_simple, case_sensitive=True)) - - # File schema has the same column names - file_schema = Schema( - NestedField(field_id=1, name="foo", field_type=StringType(), required=False), - NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), - NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), - schema_id=1, - ) - - # Translate column names - translated_expr = translate_column_names(bound_expr, file_schema, case_sensitive=True) - - # Should return an unbound expression with the same column name since they match - assert isinstance(translated_expr, EqualTo) - assert translated_expr.term == Reference("foo") - assert translated_expr.literal == literal("test_value") - - -def test_translate_column_names_different_column_names() -> None: - """Test translate_column_names with different column names in file schema.""" - # Original schema - original_schema = Schema( - NestedField(field_id=1, name="original_name", field_type=StringType(), required=False), - schema_id=1, - ) - - # Create bound expression - unbound_expr = 
EqualTo("original_name", "test_value") - bound_expr = visit(unbound_expr, visitor=BindVisitor(schema=original_schema, case_sensitive=True)) - - # File schema has different column name but same field ID - file_schema = Schema( - NestedField(field_id=1, name="file_column_name", field_type=StringType(), required=False), - schema_id=1, - ) - - # Translate column names - translated_expr = translate_column_names(bound_expr, file_schema, case_sensitive=True) - - # Should use the file schema's column name - assert isinstance(translated_expr, EqualTo) - assert translated_expr.term == Reference("file_column_name") - assert translated_expr.literal == literal("test_value") - - -def test_translate_column_names_missing_column() -> None: - """Test translate_column_names when column is missing from file schema (such as in schema evolution).""" - # Original schema - original_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - NestedField(field_id=2, name="missing_col", field_type=IntegerType(), required=False), - schema_id=1, - ) - - # Create bound expression for the missing column - unbound_expr = EqualTo("missing_col", 42) - bound_expr = visit(unbound_expr, visitor=BindVisitor(schema=original_schema, case_sensitive=True)) - - # File schema only has the existing column (field_id=1), missing field_id=2 - file_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - schema_id=1, - ) - - # Translate column names - translated_expr = translate_column_names(bound_expr, file_schema, case_sensitive=True) - - # missing_col's default initial_default (None) does not match the expression literal (42) - assert translated_expr == AlwaysFalse() - - -def test_translate_column_names_missing_column_match_null() -> None: - """Test translate_column_names when missing column matches null.""" - # Original schema - original_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - NestedField(field_id=2, name="missing_col", field_type=IntegerType(), required=False), - schema_id=1, - ) - - # Create bound expression for the missing column - unbound_expr = IsNull("missing_col") - bound_expr = visit(unbound_expr, visitor=BindVisitor(schema=original_schema, case_sensitive=True)) - - # File schema only has the existing column (field_id=1), missing field_id=2 - file_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - schema_id=1, - ) - - # Translate column names - translated_expr = translate_column_names(bound_expr, file_schema, case_sensitive=True) - - # Should evaluate to AlwaysTrue because the missing column is treated as null - # missing_col's default initial_default (None) satisfies the IsNull predicate - assert translated_expr == AlwaysTrue() - - -def test_translate_column_names_missing_column_match_explicit_null() -> None: - """Test translate_column_names when missing column matches null.""" - # Original schema - original_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - NestedField(field_id=2, name="missing_col", field_type=IntegerType(), required=False), - schema_id=1, - ) - - # Create bound expression for the missing column - unbound_expr = IsNull("missing_col") - bound_expr = visit(unbound_expr, visitor=BindVisitor(schema=original_schema, case_sensitive=True)) - - # File schema only has the existing column (field_id=1), missing field_id=2 - file_schema = 
Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - schema_id=1, - ) - - # Translate column names - translated_expr = translate_column_names(bound_expr, file_schema, projected_field_values={2: None}) - - # Should evaluate to AlwaysTrue because the missing column is treated as null - # missing_col's default initial_default (None) satisfies the IsNull predicate - assert translated_expr == AlwaysTrue() - - -def test_translate_column_names_missing_column_with_initial_default() -> None: - """Test translate_column_names when missing column's initial_default matches expression.""" - # Original schema - original_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - NestedField(field_id=2, name="missing_col", field_type=IntegerType(), required=False, initial_default=42), - schema_id=1, - ) - - # Create bound expression for the missing column - unbound_expr = EqualTo("missing_col", 42) - bound_expr = visit(unbound_expr, visitor=BindVisitor(schema=original_schema, case_sensitive=True)) - - # File schema only has the existing column (field_id=1), missing field_id=2 - file_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - schema_id=1, - ) - - # Translate column names - translated_expr = translate_column_names(bound_expr, file_schema, case_sensitive=True) - - # Should evaluate to AlwaysTrue because the initial_default value (42) matches the literal (42) - assert translated_expr == AlwaysTrue() - - -def test_translate_column_names_missing_column_with_initial_default_mismatch() -> None: - """Test translate_column_names when missing column's initial_default doesn't match expression.""" - # Original schema - original_schema = Schema( - NestedField(field_id=2, name="missing_col", field_type=IntegerType(), required=False, initial_default=10), - schema_id=1, - ) - - # Create bound expression that won't match the default value - unbound_expr = EqualTo("missing_col", 42) - bound_expr = visit(unbound_expr, visitor=BindVisitor(schema=original_schema, case_sensitive=True)) - - # File schema doesn't have this column - file_schema = Schema( - NestedField(field_id=1, name="other_col", field_type=StringType(), required=False), - schema_id=1, - ) - - # Translate column names - translated_expr = translate_column_names(bound_expr, file_schema, case_sensitive=True) - - # Should evaluate to AlwaysFalse because initial_default value (10) doesn't match literal (42) - assert translated_expr == AlwaysFalse() - - -def test_translate_column_names_missing_column_with_projected_field_matches() -> None: - """Test translate_column_names with projected field value that matches expression.""" - # Original schema with a field that has no initial_default (defaults to None) - original_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - NestedField(field_id=2, name="missing_col", field_type=IntegerType(), required=False), - schema_id=1, - ) - - # Create bound expression for the missing column - unbound_expr = EqualTo("missing_col", 42) - bound_expr = visit(unbound_expr, visitor=BindVisitor(schema=original_schema, case_sensitive=True)) - - # File schema only has the existing column (field_id=1), missing field_id=2 - file_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - schema_id=1, - ) - - # Projected column that is missing in the file schema - translated_expr = 
translate_column_names(bound_expr, file_schema, projected_field_values={2: 42}) - - # Should evaluate to AlwaysTrue since projected field value matches the expression literal - # even though the field is missing in the file schema - assert translated_expr == AlwaysTrue() - - -def test_translate_column_names_missing_column_with_projected_field_mismatch() -> None: - """Test translate_column_names with projected field value that doesn't match expression.""" - # Original schema with a field that has no initial_default (defaults to None) - original_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - NestedField(field_id=2, name="missing_col", field_type=IntegerType(), required=False), - schema_id=1, - ) - - # Create bound expression for the missing column - unbound_expr = EqualTo("missing_col", 42) - bound_expr = visit(unbound_expr, visitor=BindVisitor(schema=original_schema, case_sensitive=True)) - - # File schema only has the existing column (field_id=1), missing field_id=2 - file_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - schema_id=1, - ) - - # Projected column that is missing in the file schema - translated_expr = translate_column_names(bound_expr, file_schema, projected_field_values={2: 1}) - - # Should evaluate to AlwaysFalse since projected field value does not match the expression literal - assert translated_expr == AlwaysFalse() - - -def test_translate_column_names_missing_column_projected_field_ignores_initial_default() -> None: - """Test translate_column_names when projected field value doesn't match but initial_default does.""" - # Original schema with a field that has an initial_default - original_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - NestedField(field_id=2, name="missing_col", field_type=IntegerType(), required=False, initial_default=42), - schema_id=1, - ) - - # Create bound expression for the missing column that would match initial_default - unbound_expr = EqualTo("missing_col", 42) - bound_expr = visit(unbound_expr, visitor=BindVisitor(schema=original_schema, case_sensitive=True)) - - # File schema only has the existing column (field_id=1), missing field_id=2 - file_schema = Schema( - NestedField(field_id=1, name="existing_col", field_type=StringType(), required=False), - schema_id=1, - ) - - # Projected field value that differs from both the expression literal and initial_default - translated_expr = translate_column_names( - bound_expr, - file_schema, - projected_field_values={2: 10}, # This doesn't match expression literal (42) - ) - - # Should evaluate to AlwaysFalse since projected field value doesn't match the expression literal - assert translated_expr == AlwaysFalse() diff --git a/tests/integration/test_add_files.py b/tests/integration/test_add_files.py index 47e56be1f3..8713615218 100644 --- a/tests/integration/test_add_files.py +++ b/tests/integration/test_add_files.py @@ -16,13 +16,10 @@ # under the License. 
# pylint:disable=redefined-outer-name -import multiprocessing import os import re -import threading from datetime import date from typing import Iterator -from unittest import mock import pyarrow as pa import pyarrow.parquet as pq @@ -33,13 +30,11 @@ from pyiceberg.catalog import Catalog from pyiceberg.exceptions import NoSuchTableError from pyiceberg.io import FileIO -from pyiceberg.io.pyarrow import UnsupportedPyArrowTypeException, schema_to_pyarrow -from pyiceberg.manifest import DataFile +from pyiceberg.io.pyarrow import UnsupportedPyArrowTypeException, _pyarrow_schema_ensure_large_types from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionField, PartitionSpec from pyiceberg.schema import Schema from pyiceberg.table import Table -from pyiceberg.table.metadata import TableMetadata -from pyiceberg.transforms import BucketTransform, HourTransform, IdentityTransform, MonthTransform +from pyiceberg.transforms import BucketTransform, IdentityTransform, MonthTransform from pyiceberg.types import ( BooleanType, DateType, @@ -47,7 +42,6 @@ LongType, NestedField, StringType, - TimestampType, TimestamptzType, ) @@ -235,54 +229,6 @@ def test_add_files_to_unpartitioned_table_raises_has_field_ids( tbl.add_files(file_paths=file_paths) -@pytest.mark.integration -def test_add_files_parallelized(spark: SparkSession, session_catalog: Catalog, format_version: int) -> None: - from pyiceberg.io.pyarrow import parquet_file_to_data_file - - real_parquet_file_to_data_file = parquet_file_to_data_file - - lock = threading.Lock() - unique_threads_seen = set() - cpu_count = multiprocessing.cpu_count() - - # patch the function _parquet_file_to_data_file to we can track how many unique thread IDs - # it was executed from - with mock.patch("pyiceberg.io.pyarrow.parquet_file_to_data_file") as patch_func: - - def mock_parquet_file_to_data_file(io: FileIO, table_metadata: TableMetadata, file_path: str) -> DataFile: - lock.acquire() - thread_id = threading.get_ident() # the current thread ID - unique_threads_seen.add(thread_id) - lock.release() - return real_parquet_file_to_data_file(io=io, table_metadata=table_metadata, file_path=file_path) - - patch_func.side_effect = mock_parquet_file_to_data_file - - identifier = f"default.unpartitioned_table_schema_updates_v{format_version}" - tbl = _create_table(session_catalog, identifier, format_version) - - file_paths = [ - f"s3://warehouse/default/add_files_parallel/v{format_version}/test-{i}.parquet" for i in range(cpu_count * 2) - ] - # write parquet files - for file_path in file_paths: - fo = tbl.io.new_output(file_path) - with fo.create(overwrite=True) as fos: - with pq.ParquetWriter(fos, schema=ARROW_SCHEMA) as writer: - writer.write_table(ARROW_TABLE) - - tbl.add_files(file_paths=file_paths) - - # duration creation of threadpool processor, when max_workers is not - # specified, python will add cpu_count + 4 as the number of threads in the - # pool in this case - # https://github.com/python/cpython/blob/e06bebb87e1b33f7251196e1ddb566f528c3fc98/Lib/concurrent/futures/thread.py#L173-L181 - # we check that we have at least seen the number of threads. 
we don't - # specify the workers in the thread pool and we can't check without - # accessing private attributes of ThreadPoolExecutor - assert len(unique_threads_seen) >= cpu_count - - @pytest.mark.integration def test_add_files_to_unpartitioned_table_with_schema_updates( spark: SparkSession, session_catalog: Catalog, format_version: int @@ -589,6 +535,11 @@ def test_add_files_with_large_and_regular_schema(spark: SparkSession, session_ca pa.field("foo", pa.string(), nullable=False), ] ) + arrow_schema_large = pa.schema( + [ + pa.field("foo", pa.large_string(), nullable=False), + ] + ) tbl = _create_table(session_catalog, identifier, format_version, schema=iceberg_schema) @@ -610,27 +561,27 @@ def test_add_files_with_large_and_regular_schema(spark: SparkSession, session_ca tbl.add_files([file_path]) table_schema = tbl.scan().to_arrow().schema - assert table_schema == arrow_schema + assert table_schema == arrow_schema_large file_path_large = f"s3://warehouse/default/unpartitioned_with_large_types/v{format_version}/test-1.parquet" _write_parquet( tbl.io, file_path_large, - arrow_schema, + arrow_schema_large, pa.Table.from_pylist( [ { "foo": "normal", } ], - schema=arrow_schema, + schema=arrow_schema_large, ), ) tbl.add_files([file_path_large]) table_schema = tbl.scan().to_arrow().schema - assert table_schema == arrow_schema + assert table_schema == arrow_schema_large @pytest.mark.integration @@ -737,17 +688,17 @@ def test_add_files_with_valid_upcast( with pq.ParquetWriter(fos, schema=pyarrow_schema_with_promoted_types) as writer: writer.write_table(pyarrow_table_with_promoted_types) - tbl.add_files(file_paths=[file_path], check_duplicate_files=False) + tbl.add_files(file_paths=[file_path]) # table's long field should cast to long on read written_arrow_table = tbl.scan().to_arrow() assert written_arrow_table == pyarrow_table_with_promoted_types.cast( pa.schema( ( pa.field("long", pa.int64(), nullable=True), - pa.field("list", pa.list_(pa.int64()), nullable=False), - pa.field("map", pa.map_(pa.string(), pa.int64()), nullable=False), + pa.field("list", pa.large_list(pa.int64()), nullable=False), + pa.field("map", pa.map_(pa.large_string(), pa.int64()), nullable=False), pa.field("double", pa.float64(), nullable=True), - pa.field("uuid", pa.uuid(), nullable=True), + pa.field("uuid", pa.binary(length=16), nullable=True), # can UUID is read as fixed length binary of length 16 ) ) ) @@ -795,7 +746,7 @@ def test_add_files_subset_of_schema(spark: SparkSession, session_catalog: Catalo "qux": date(2024, 3, 7), } ], - schema=ARROW_SCHEMA, + schema=_pyarrow_schema_ensure_large_types(ARROW_SCHEMA), ) lhs = spark.table(f"{identifier}").toPandas() @@ -899,30 +850,3 @@ def test_add_files_that_referenced_by_current_snapshot_with_check_duplicate_file with pytest.raises(ValueError) as exc_info: tbl.add_files(file_paths=[existing_files_in_table], check_duplicate_files=True) assert f"Cannot add files that are already referenced by table, files: {existing_files_in_table}" in str(exc_info.value) - - -@pytest.mark.integration -def test_add_files_hour_transform(session_catalog: Catalog) -> None: - identifier = "default.test_add_files_hour_transform" - - schema = Schema(NestedField(1, "hourly", TimestampType())) - schema_arrow = schema_to_pyarrow(schema, include_field_ids=False) - spec = PartitionSpec(PartitionField(source_id=1, field_id=1000, transform=HourTransform(), name="spec_hour")) - - tbl = _create_table(session_catalog, identifier, format_version=1, schema=schema, partition_spec=spec) - - file_path = 
"s3://warehouse/default/test_add_files_hour_transform/test.parquet" - - from pyiceberg.utils.datetime import micros_to_timestamp - - arrow_table = pa.Table.from_pylist( - [{"hourly": micros_to_timestamp(1743465600155254)}, {"hourly": micros_to_timestamp(1743469198047855)}], - schema=schema_arrow, - ) - - fo = tbl.io.new_output(file_path) - with fo.create(overwrite=True) as fos: - with pq.ParquetWriter(fos, schema=schema_arrow) as writer: - writer.write_table(arrow_table) - - tbl.add_files(file_paths=[file_path]) diff --git a/tests/integration/test_catalog.py b/tests/integration/test_catalog.py deleted file mode 100644 index 123aca1bef..0000000000 --- a/tests/integration/test_catalog.py +++ /dev/null @@ -1,316 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from pathlib import Path, PosixPath -from typing import Generator, List - -import pytest - -from pyiceberg.catalog import Catalog, MetastoreCatalog -from pyiceberg.catalog.hive import HiveCatalog -from pyiceberg.catalog.memory import InMemoryCatalog -from pyiceberg.catalog.rest import RestCatalog -from pyiceberg.catalog.sql import SqlCatalog -from pyiceberg.exceptions import ( - NamespaceAlreadyExistsError, - NamespaceNotEmptyError, - NoSuchNamespaceError, - NoSuchTableError, - TableAlreadyExistsError, -) -from pyiceberg.io import WAREHOUSE -from pyiceberg.schema import Schema -from tests.conftest import clean_up - - -@pytest.fixture(scope="function") -def memory_catalog(tmp_path: PosixPath) -> Generator[Catalog, None, None]: - test_catalog = InMemoryCatalog( - "test.in_memory.catalog", **{WAREHOUSE: tmp_path.absolute().as_posix(), "test.key": "test.value"} - ) - yield test_catalog - - clean_up(test_catalog) - - -@pytest.fixture(scope="function") -def sqlite_catalog_memory(warehouse: Path) -> Generator[Catalog, None, None]: - test_catalog = SqlCatalog("sqlitememory", uri="sqlite:///:memory:", warehouse=f"file://{warehouse}") - - yield test_catalog - - clean_up(test_catalog) - - -@pytest.fixture(scope="function") -def sqlite_catalog_file(warehouse: Path) -> Generator[Catalog, None, None]: - test_catalog = SqlCatalog("sqlitefile", uri=f"sqlite:////{warehouse}/sql-catalog.db", warehouse=f"file://{warehouse}") - - yield test_catalog - - clean_up(test_catalog) - - -@pytest.fixture(scope="function") -def rest_catalog() -> Generator[Catalog, None, None]: - test_catalog = RestCatalog("rest", uri="http://localhost:8181") - - yield test_catalog - - clean_up(test_catalog) - - -@pytest.fixture(scope="function") -def hive_catalog() -> Generator[Catalog, None, None]: - test_catalog = HiveCatalog( - "test_hive_catalog", - **{ - "uri": "http://localhost:9083", - "s3.endpoint": "http://localhost:9000", - "s3.access-key-id": "admin", - "s3.secret-access-key": "password", - }, - ) - yield test_catalog - 
clean_up(test_catalog) - - -CATALOGS = [ - pytest.lazy_fixture("memory_catalog"), - pytest.lazy_fixture("sqlite_catalog_memory"), - pytest.lazy_fixture("sqlite_catalog_file"), - pytest.lazy_fixture("rest_catalog"), - pytest.lazy_fixture("hive_catalog"), -] - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_create_table_with_default_location( - test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str -) -> None: - identifier = (database_name, table_name) - test_catalog.create_namespace(database_name) - test_catalog.create_table(identifier, table_schema_nested) - table = test_catalog.load_table(identifier) - assert table.name() == identifier - assert MetastoreCatalog._parse_metadata_version(table.metadata_location) == 0 - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_create_table_with_invalid_database(test_catalog: Catalog, table_schema_nested: Schema, table_name: str) -> None: - identifier = ("invalid", table_name) - with pytest.raises(NoSuchNamespaceError): - test_catalog.create_table(identifier, table_schema_nested) - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_create_duplicated_table(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str) -> None: - test_catalog.create_namespace(database_name) - test_catalog.create_table((database_name, table_name), table_schema_nested) - with pytest.raises(TableAlreadyExistsError): - test_catalog.create_table((database_name, table_name), table_schema_nested) - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_create_table_if_not_exists_duplicated_table( - test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str -) -> None: - test_catalog.create_namespace(database_name) - table1 = test_catalog.create_table((database_name, table_name), table_schema_nested) - table2 = test_catalog.create_table_if_not_exists((database_name, table_name), table_schema_nested) - assert table1.name() == table2.name() - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_load_table(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str) -> None: - identifier = (database_name, table_name) - test_catalog.create_namespace(database_name) - table = test_catalog.create_table(identifier, table_schema_nested) - loaded_table = test_catalog.load_table(identifier) - assert table.name() == loaded_table.name() - assert table.metadata_location == loaded_table.metadata_location - assert table.metadata == loaded_table.metadata - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_list_tables(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_list: List[str]) -> None: - test_catalog.create_namespace(database_name) - for table_name in table_list: - test_catalog.create_table((database_name, table_name), table_schema_nested) - identifier_list = test_catalog.list_tables(database_name) - assert len(identifier_list) == len(table_list) - for table_name in table_list: - assert (database_name, table_name) in identifier_list - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_rename_table(test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str) -> None: - new_database_name = f"{database_name}_new" - 
test_catalog.create_namespace(database_name) - test_catalog.create_namespace(new_database_name) - new_table_name = f"rename-{table_name}" - identifier = (database_name, table_name) - table = test_catalog.create_table(identifier, table_schema_nested) - assert table.name() == identifier - new_identifier = (new_database_name, new_table_name) - test_catalog.rename_table(identifier, new_identifier) - new_table = test_catalog.load_table(new_identifier) - assert new_table.name() == new_identifier - assert new_table.metadata_location == table.metadata_location - with pytest.raises(NoSuchTableError): - test_catalog.load_table(identifier) - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_drop_table(test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str) -> None: - identifier = (database_name, table_name) - test_catalog.create_namespace(database_name) - table = test_catalog.create_table(identifier, table_schema_nested) - assert table.name() == identifier - test_catalog.drop_table(identifier) - with pytest.raises(NoSuchTableError): - test_catalog.load_table(identifier) - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_purge_table(test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str) -> None: - if isinstance(test_catalog, HiveCatalog): - pytest.skip("HiveCatalog does not support purge_table operation yet") - - identifier = (database_name, table_name) - test_catalog.create_namespace(database_name) - test_catalog.create_table(identifier, table_schema_nested) - table = test_catalog.load_table(identifier) - assert table.name() == identifier - test_catalog.purge_table(identifier) - with pytest.raises(NoSuchTableError): - test_catalog.load_table(identifier) - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_table_exists(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str) -> None: - test_catalog.create_namespace(database_name) - test_catalog.create_table((database_name, table_name), table_schema_nested) - assert test_catalog.table_exists((database_name, table_name)) is True - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_create_namespace(test_catalog: Catalog, database_name: str) -> None: - test_catalog.create_namespace(database_name) - assert (database_name,) in test_catalog.list_namespaces() - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_create_duplicate_namespace(test_catalog: Catalog, database_name: str) -> None: - test_catalog.create_namespace(database_name) - with pytest.raises(NamespaceAlreadyExistsError): - test_catalog.create_namespace(database_name) - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_create_namepsace_if_not_exists(test_catalog: Catalog, database_name: str) -> None: - test_catalog.create_namespace(database_name) - test_catalog.create_namespace_if_not_exists(database_name) - assert (database_name,) in test_catalog.list_namespaces() - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_create_namespace_with_comment(test_catalog: Catalog, database_name: str) -> None: - test_properties = { - "comment": "this is a test description", - } - test_catalog.create_namespace(namespace=database_name, properties=test_properties) - loaded_database_list = test_catalog.list_namespaces() - assert 
(database_name,) in loaded_database_list - properties = test_catalog.load_namespace_properties(database_name) - assert properties["comment"] == "this is a test description" - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_list_namespaces(test_catalog: Catalog, database_list: List[str]) -> None: - for database_name in database_list: - test_catalog.create_namespace(database_name) - db_list = test_catalog.list_namespaces() - for database_name in database_list: - assert (database_name,) in db_list - assert len(test_catalog.list_namespaces(list(database_list)[0])) == 0 - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_drop_namespace(test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str) -> None: - test_catalog.create_namespace(database_name) - assert (database_name,) in test_catalog.list_namespaces() - test_catalog.create_table((database_name, table_name), table_schema_nested) - with pytest.raises(NamespaceNotEmptyError): - test_catalog.drop_namespace(database_name) - test_catalog.drop_table((database_name, table_name)) - test_catalog.drop_namespace(database_name) - assert (database_name,) not in test_catalog.list_namespaces() - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_load_namespace_properties(test_catalog: Catalog, database_name: str) -> None: - test_properties = { - "comment": "this is a test description", - "test_property1": "1", - "test_property2": "2", - "test_property3": "3", - } - test_catalog.create_namespace(database_name, test_properties) - listed_properties = test_catalog.load_namespace_properties(database_name) - for k, v in test_properties.items(): - assert v == listed_properties[k] - - -@pytest.mark.integration -@pytest.mark.parametrize("test_catalog", CATALOGS) -def test_update_namespace_properties(test_catalog: Catalog, database_name: str) -> None: - test_properties = { - "comment": "this is a test description", - "test_property1": "1", - "test_property2": "2", - "test_property3": "3", - } - removals = {"test_property1", "test_property2", "test_property3", "should_not_removed"} - updates = {"test_property4": "4", "test_property5": "5", "comment": "updated test description"} - test_catalog.create_namespace(database_name, test_properties) - update_report = test_catalog.update_namespace_properties(database_name, removals, updates) - for k in updates.keys(): - assert k in update_report.updated - for k in removals: - if k == "should_not_removed": - assert k in update_report.missing - else: - assert k in update_report.removed - assert "updated test description" == test_catalog.load_namespace_properties(database_name)["comment"] diff --git a/tests/integration/test_deletes.py b/tests/integration/test_deletes.py index abf8502ac7..ae03beea53 100644 --- a/tests/integration/test_deletes.py +++ b/tests/integration/test_deletes.py @@ -467,19 +467,21 @@ def test_partitioned_table_positional_deletes_sequence_number(spark: SparkSessio assert snapshots[2].summary == Summary( Operation.OVERWRITE, **{ + "added-files-size": snapshots[2].summary["total-files-size"], "added-data-files": "1", - "added-files-size": snapshots[2].summary["added-files-size"], "added-records": "2", "changed-partition-count": "1", - "deleted-data-files": "1", - "deleted-records": "3", - "removed-files-size": snapshots[2].summary["removed-files-size"], - "total-data-files": "2", - "total-delete-files": "1", - "total-equality-deletes": "0", "total-files-size": 
snapshots[2].summary["total-files-size"], - "total-position-deletes": "1", - "total-records": "4", + "total-delete-files": "0", + "total-data-files": "1", + "total-position-deletes": "0", + "total-records": "2", + "total-equality-deletes": "0", + "deleted-data-files": "2", + "removed-delete-files": "1", + "deleted-records": "5", + "removed-files-size": snapshots[2].summary["removed-files-size"], + "removed-position-deletes": "1", }, ) @@ -894,32 +896,3 @@ def test_overwrite_with_filter_case_insensitive(test_table: Table) -> None: test_table.overwrite(df=new_table, overwrite_filter=f"Idx == {record_to_overwrite['idx']}", case_sensitive=False) assert record_to_overwrite not in test_table.scan().to_arrow().to_pylist() assert new_record_to_insert in test_table.scan().to_arrow().to_pylist() - - -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2]) -@pytest.mark.filterwarnings("ignore:Delete operation did not match any records") -def test_delete_on_empty_table(spark: SparkSession, session_catalog: RestCatalog, format_version: int) -> None: - identifier = f"default.test_delete_on_empty_table_{format_version}" - - run_spark_commands( - spark, - [ - f"DROP TABLE IF EXISTS {identifier}", - f""" - CREATE TABLE {identifier} ( - volume int - ) - USING iceberg - TBLPROPERTIES('format-version' = {format_version}) - """, - ], - ) - - tbl = session_catalog.load_table(identifier) - - # Perform a delete operation on the empty table - tbl.delete(AlwaysTrue()) - - # Assert that no new snapshot was created because no rows were deleted - assert len(tbl.snapshots()) == 0 diff --git a/tests/integration/test_hive_migration.py b/tests/integration/test_hive_migration.py deleted file mode 100644 index 51386d56c4..0000000000 --- a/tests/integration/test_hive_migration.py +++ /dev/null @@ -1,83 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-import time -from datetime import date - -import pytest -from pyspark.sql import SparkSession - -from pyiceberg.catalog import Catalog - - -@pytest.mark.integration -def test_migrate_table( - session_catalog_hive: Catalog, - spark: SparkSession, -) -> None: - """ - Imported tables are an edge case since the partition column is not stored - in the Parquet files: - - test_migrate_table_hive_1754486926/dt=2022-01-01/part-00000-30a9798b-7597-4027-86d9-79d7c529bc87.c000.snappy.parquet - { - "type" : "record", - "name" : "spark_schema", - "fields" : [ { - "name" : "number", - "type" : "int" - } ] - } - - PyIceberg will project this column when the table is being read - """ - # Create new tables to avoid complex cleanup - src_table_identifier = f"spark_catalog.default.test_migrate_table_hive_{int(time.time())}" - dst_table_identifier = f"default.test_migrate_table_{int(time.time())}" - - spark.sql(f""" - CREATE TABLE {src_table_identifier} ( - number INTEGER - ) - PARTITIONED BY (dt date) - STORED AS parquet - """) - - spark.sql(f""" - INSERT OVERWRITE TABLE {src_table_identifier} - PARTITION (dt='2022-01-01') - VALUES (1), (2), (3) - """) - - spark.sql(f""" - INSERT OVERWRITE TABLE {src_table_identifier} - PARTITION (dt='2023-01-01') - VALUES (4), (5), (6) - """) - - # Docs: https://iceberg.apache.org/docs/latest/hive-migration/#snapshot-hive-table-to-iceberg - spark.sql(f""" - CALL hive.system.snapshot('{src_table_identifier}', 'hive.{dst_table_identifier}') - """) - - tbl = session_catalog_hive.load_table(dst_table_identifier) - assert tbl.schema().column_names == ["number", "dt"] - - assert set(tbl.scan().to_arrow().column(1).combine_chunks().tolist()) == {date(2023, 1, 1), date(2022, 1, 1)} - assert tbl.scan(row_filter="number > 3").to_arrow().column(0).combine_chunks().tolist() == [4, 5, 6] - assert tbl.scan(row_filter="dt == '2023-01-01'").to_arrow().column(0).combine_chunks().tolist() == [4, 5, 6] - assert tbl.scan(row_filter="dt == '2022-01-01'").to_arrow().column(0).combine_chunks().tolist() == [1, 2, 3] - assert tbl.scan(row_filter="dt < '2022-02-01'").to_arrow().column(0).combine_chunks().tolist() == [1, 2, 3] diff --git a/tests/integration/test_inspect_table.py b/tests/integration/test_inspect_table.py index e81050a81c..75fe92a69a 100644 --- a/tests/integration/test_inspect_table.py +++ b/tests/integration/test_inspect_table.py @@ -71,133 +71,6 @@ def _create_table(session_catalog: Catalog, identifier: str, properties: Propert return session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties=properties) -def _inspect_files_asserts(df: pa.Table, spark_df: DataFrame) -> None: - from pandas.testing import assert_frame_equal - - assert df.column_names == [ - "content", - "file_path", - "file_format", - "spec_id", - "partition", - "record_count", - "file_size_in_bytes", - "column_sizes", - "value_counts", - "null_value_counts", - "nan_value_counts", - "lower_bounds", - "upper_bounds", - "key_metadata", - "split_offsets", - "equality_ids", - "sort_order_id", - "readable_metrics", - ] - - # make sure the non-nullable fields are filled - for int_column in ["content", "spec_id", "record_count", "file_size_in_bytes"]: - for value in df[int_column]: - assert isinstance(value.as_py(), int) - - for split_offsets in df["split_offsets"]: - if split_offsets.as_py() is not None: - assert isinstance(split_offsets.as_py(), list) - - for file_path in df["file_path"]: - assert file_path.as_py().startswith("s3://") - - # sort the dataframes by content and file_path to compare 
them, - # as the order of the files is not guaranteed in case of all_files - lhs = df.to_pandas().sort_values(by=["content", "file_path"]).reset_index(drop=True) - rhs = spark_df.toPandas().sort_values(by=["content", "file_path"]).reset_index(drop=True) - - lhs_subset = lhs[ - [ - "content", - "file_path", - "file_format", - "spec_id", - "record_count", - "file_size_in_bytes", - "split_offsets", - "equality_ids", - "sort_order_id", - ] - ] - rhs_subset = rhs[ - [ - "content", - "file_path", - "file_format", - "spec_id", - "record_count", - "file_size_in_bytes", - "split_offsets", - "equality_ids", - "sort_order_id", - ] - ] - - assert_frame_equal(lhs_subset, rhs_subset, check_dtype=False, check_categorical=False) - - for column in df.column_names: - if column == "partition": - # Spark leaves out the partition if the table is unpartitioned - continue - for left, right in zip(lhs[column].to_list(), rhs[column].to_list()): - if isinstance(left, float) and math.isnan(left) and isinstance(right, float) and math.isnan(right): - # NaN != NaN in Python - continue - if column in [ - "column_sizes", - "value_counts", - "null_value_counts", - "nan_value_counts", - "lower_bounds", - "upper_bounds", - ]: - if isinstance(right, dict): - left = dict(left) - assert left == right, f"Difference in column {column}: {left} != {right}" - - elif column == "readable_metrics": - assert list(left.keys()) == [ - "bool", - "string", - "string_long", - "int", - "long", - "float", - "double", - "timestamp", - "timestamptz", - "date", - "binary", - "fixed", - ] - assert left.keys() == right.keys() - - for rm_column in left.keys(): - rm_lhs = left[rm_column] - rm_rhs = right[rm_column] - - assert rm_lhs["column_size"] == rm_rhs["column_size"] - assert rm_lhs["value_count"] == rm_rhs["value_count"] - assert rm_lhs["null_value_count"] == rm_rhs["null_value_count"] - assert rm_lhs["nan_value_count"] == rm_rhs["nan_value_count"] - - if rm_column == "timestamptz" and rm_rhs["lower_bound"] and rm_rhs["upper_bound"]: - # PySpark does not correctly set the timstamptz - rm_rhs["lower_bound"] = rm_rhs["lower_bound"].replace(tzinfo=pytz.utc) - rm_rhs["upper_bound"] = rm_rhs["upper_bound"].replace(tzinfo=pytz.utc) - - assert rm_lhs["lower_bound"] == rm_rhs["lower_bound"] - assert rm_lhs["upper_bound"] == rm_rhs["upper_bound"] - else: - assert left == right, f"Difference in column {column}: {left} != {right}" - - @pytest.mark.integration @pytest.mark.parametrize("format_version", [1, 2]) def test_inspect_snapshots( @@ -291,8 +164,6 @@ def test_inspect_entries( # Write some data tbl.append(arrow_table_with_null) - # Generate a DELETE entry - tbl.overwrite(arrow_table_with_null) def check_pyiceberg_df_equals_spark_df(df: pa.Table, spark_df: DataFrame) -> None: assert df.column_names == [ @@ -314,8 +185,6 @@ def check_pyiceberg_df_equals_spark_df(df: pa.Table, spark_df: DataFrame) -> Non lhs = df.to_pandas() rhs = spark_df.toPandas() - assert len(lhs) == len(rhs) - for column in df.column_names: for left, right in zip(lhs[column].to_list(), rhs[column].to_list()): if column == "data_file": @@ -792,6 +661,8 @@ def test_inspect_history(spark: SparkSession, session_catalog: Catalog, format_v def test_inspect_files( spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int ) -> None: + from pandas.testing import assert_frame_equal + identifier = "default.table_metadata_files" tbl = _create_table(session_catalog, identifier, properties={"format-version": format_version}) @@ -813,9 +684,129 @@ def 
test_inspect_files( delete_files_df = tbl.inspect.delete_files() - _inspect_files_asserts(files_df, spark.table(f"{identifier}.files")) - _inspect_files_asserts(data_files_df, spark.table(f"{identifier}.data_files")) - _inspect_files_asserts(delete_files_df, spark.table(f"{identifier}.delete_files")) + def inspect_files_asserts(df: pa.Table, spark_df: DataFrame) -> None: + assert df.column_names == [ + "content", + "file_path", + "file_format", + "spec_id", + "record_count", + "file_size_in_bytes", + "column_sizes", + "value_counts", + "null_value_counts", + "nan_value_counts", + "lower_bounds", + "upper_bounds", + "key_metadata", + "split_offsets", + "equality_ids", + "sort_order_id", + "readable_metrics", + ] + + # make sure the non-nullable fields are filled + for int_column in ["content", "spec_id", "record_count", "file_size_in_bytes"]: + for value in df[int_column]: + assert isinstance(value.as_py(), int) + + for split_offsets in df["split_offsets"]: + assert isinstance(split_offsets.as_py(), list) + + for file_format in df["file_format"]: + assert file_format.as_py() == "PARQUET" + + for file_path in df["file_path"]: + assert file_path.as_py().startswith("s3://") + + lhs = df.to_pandas() + rhs = spark_df.toPandas() + + lhs_subset = lhs[ + [ + "content", + "file_path", + "file_format", + "spec_id", + "record_count", + "file_size_in_bytes", + "split_offsets", + "equality_ids", + "sort_order_id", + ] + ] + rhs_subset = rhs[ + [ + "content", + "file_path", + "file_format", + "spec_id", + "record_count", + "file_size_in_bytes", + "split_offsets", + "equality_ids", + "sort_order_id", + ] + ] + + assert_frame_equal(lhs_subset, rhs_subset, check_dtype=False, check_categorical=False) + + for column in df.column_names: + for left, right in zip(lhs[column].to_list(), rhs[column].to_list()): + if isinstance(left, float) and math.isnan(left) and isinstance(right, float) and math.isnan(right): + # NaN != NaN in Python + continue + if column in [ + "column_sizes", + "value_counts", + "null_value_counts", + "nan_value_counts", + "lower_bounds", + "upper_bounds", + ]: + if isinstance(right, dict): + left = dict(left) + assert left == right, f"Difference in column {column}: {left} != {right}" + + elif column == "readable_metrics": + assert list(left.keys()) == [ + "bool", + "string", + "string_long", + "int", + "long", + "float", + "double", + "timestamp", + "timestamptz", + "date", + "binary", + "fixed", + ] + assert left.keys() == right.keys() + + for rm_column in left.keys(): + rm_lhs = left[rm_column] + rm_rhs = right[rm_column] + + assert rm_lhs["column_size"] == rm_rhs["column_size"] + assert rm_lhs["value_count"] == rm_rhs["value_count"] + assert rm_lhs["null_value_count"] == rm_rhs["null_value_count"] + assert rm_lhs["nan_value_count"] == rm_rhs["nan_value_count"] + + if rm_column == "timestamptz" and rm_rhs["lower_bound"] and rm_rhs["upper_bound"]: + # PySpark does not correctly set the timstamptz + rm_rhs["lower_bound"] = rm_rhs["lower_bound"].replace(tzinfo=pytz.utc) + rm_rhs["upper_bound"] = rm_rhs["upper_bound"].replace(tzinfo=pytz.utc) + + assert rm_lhs["lower_bound"] == rm_rhs["lower_bound"] + assert rm_lhs["upper_bound"] == rm_rhs["upper_bound"] + else: + assert left == right, f"Difference in column {column}: {left} != {right}" + + inspect_files_asserts(files_df, spark.table(f"{identifier}.files")) + inspect_files_asserts(data_files_df, spark.table(f"{identifier}.data_files")) + inspect_files_asserts(delete_files_df, spark.table(f"{identifier}.delete_files")) @pytest.mark.integration 
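The inlined inspect_files_asserts helper above boils down to converting PyIceberg's files metadata (the PyArrow table returned by tbl.inspect.files()) to pandas and checking it against Spark's files metadata table. A minimal sketch of that comparison, assuming a loaded table tbl, a SparkSession spark, and a table identifier as in the test above; the sort is added here only to make the check order-independent:

from pandas.testing import assert_frame_equal

def compare_files_metadata(tbl, spark, identifier: str) -> None:
    # Columns present both in PyIceberg's inspect.files() output and in Spark's <table>.files
    shared_columns = ["content", "file_path", "file_format", "spec_id", "record_count", "file_size_in_bytes"]
    # PyIceberg side: PyArrow table -> pandas DataFrame
    lhs = tbl.inspect.files().to_pandas()[shared_columns].sort_values("file_path").reset_index(drop=True)
    # Spark side: metadata table -> pandas DataFrame
    rhs = spark.table(f"{identifier}.files").toPandas()[shared_columns].sort_values("file_path").reset_index(drop=True)
    assert_frame_equal(lhs, rhs, check_dtype=False)
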
@@ -828,9 +819,6 @@ def test_inspect_files_no_snapshot(spark: SparkSession, session_catalog: Catalog files_df = tbl.refresh().inspect.files() data_files_df = tbl.inspect.data_files() delete_files_df = tbl.inspect.delete_files() - all_files_df = tbl.inspect.all_files() - all_data_files_df = tbl.inspect.all_data_files() - all_delete_files_df = tbl.inspect.all_delete_files() def inspect_files_asserts(df: pa.Table) -> None: assert df.column_names == [ @@ -838,7 +826,6 @@ def inspect_files_asserts(df: pa.Table) -> None: "file_path", "file_format", "spec_id", - "partition", "record_count", "file_size_in_bytes", "column_sizes", @@ -859,9 +846,6 @@ def inspect_files_asserts(df: pa.Table) -> None: inspect_files_asserts(files_df) inspect_files_asserts(data_files_df) inspect_files_asserts(delete_files_df) - inspect_files_asserts(all_files_df) - inspect_files_asserts(all_data_files_df) - inspect_files_asserts(all_delete_files_df) @pytest.mark.integration @@ -954,150 +938,3 @@ def test_inspect_all_manifests(spark: SparkSession, session_catalog: Catalog, fo lhs = spark.table(f"{identifier}.all_manifests").toPandas() rhs = df.to_pandas() assert_frame_equal(lhs, rhs, check_dtype=False) - - -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2]) -def test_inspect_all_files( - spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int -) -> None: - identifier = "default.table_metadata_files" - - tbl = _create_table(session_catalog, identifier, properties={"format-version": format_version}) - - # append three times - for _ in range(3): - tbl.append(arrow_table_with_null) - - # configure table properties - if format_version == 2: - with tbl.transaction() as txn: - txn.set_properties({"write.delete.mode": "merge-on-read"}) - txn.set_properties({"write.update.mode": "merge-on-read"}) - spark.sql(f"DELETE FROM {identifier} WHERE int = 1") - tbl.refresh() - tbl.append(arrow_table_with_null) - spark.sql(f"UPDATE {identifier} SET string = 'b' WHERE int = 9") - spark.sql(f"DELETE FROM {identifier} WHERE int = 1") - tbl.refresh() - - all_files_df = tbl.inspect.all_files() - all_data_files_df = tbl.inspect.all_data_files() - all_delete_files_df = tbl.inspect.all_delete_files() - - _inspect_files_asserts(all_files_df, spark.table(f"{identifier}.all_files")) - _inspect_files_asserts(all_data_files_df, spark.table(f"{identifier}.all_data_files")) - _inspect_files_asserts(all_delete_files_df, spark.table(f"{identifier}.all_delete_files")) - - -@pytest.mark.integration -def test_inspect_files_format_version_3(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - identifier = "default.table_metadata_files" - - tbl = _create_table( - session_catalog, - identifier, - properties={ - "format-version": "3", - "write.delete.mode": "merge-on-read", - "write.update.mode": "merge-on-read", - "write.merge.mode": "merge-on-read", - }, - ) - - insert_data_sql = f"""INSERT INTO {identifier} VALUES - (false, 'a', 'aaaaaaaaaaaaaaaaaaaaaa', 1, 1, 0.0, 0.0, TIMESTAMP('2023-01-01 19:25:00'), TIMESTAMP('2023-01-01 19:25:00+00:00'), DATE('2023-01-01'), X'01', X'00000000000000000000000000000000'), - (NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), - (true, 'z', 'zzzzzzzzzzzzzzzzzzzzzz', 9, 9, 0.9, 0.9, TIMESTAMP('2023-03-01 19:25:00'), TIMESTAMP('2023-03-01 19:25:00+00:00'), DATE('2023-03-01'), X'12', X'11111111111111111111111111111111'); - """ - - spark.sql(insert_data_sql) - spark.sql(insert_data_sql) - 
spark.sql(f"UPDATE {identifier} SET int = 2 WHERE int = 1") - spark.sql(f"DELETE FROM {identifier} WHERE int = 9") - - tbl.refresh() - - files_df = tbl.inspect.files() - data_files_df = tbl.inspect.data_files() - delete_files_df = tbl.inspect.delete_files() - - all_files_df = tbl.inspect.all_files() - all_data_files_df = tbl.inspect.all_data_files() - all_delete_files_df = tbl.inspect.all_delete_files() - - _inspect_files_asserts(files_df, spark.table(f"{identifier}.files")) - _inspect_files_asserts(data_files_df, spark.table(f"{identifier}.data_files")) - _inspect_files_asserts(delete_files_df, spark.table(f"{identifier}.delete_files")) - - _inspect_files_asserts(all_files_df, spark.table(f"{identifier}.all_files")) - _inspect_files_asserts(all_data_files_df, spark.table(f"{identifier}.all_data_files")) - _inspect_files_asserts(all_delete_files_df, spark.table(f"{identifier}.all_delete_files")) - - -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2, 3]) -def test_inspect_files_partitioned(spark: SparkSession, session_catalog: Catalog, format_version: int) -> None: - from pandas.testing import assert_frame_equal - - identifier = "default.table_metadata_files_partitioned" - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - - spark.sql( - f""" - CREATE TABLE {identifier} ( - dt date, - int_data int - ) - PARTITIONED BY (months(dt)) - TBLPROPERTIES ('format-version'='{format_version}') - """ - ) - - if format_version > 1: - spark.sql( - f""" - ALTER TABLE {identifier} SET TBLPROPERTIES( - 'write.update.mode' = 'merge-on-read', - 'write.delete.mode' = 'merge-on-read', - 'write.merge.mode' = 'merge-on-read') - """ - ) - - spark.sql(f""" - INSERT INTO {identifier} VALUES (CAST('2025-01-01' AS date), 1), (CAST('2025-01-01' AS date), 2) - """) - - spark.sql( - f""" - ALTER TABLE {identifier} - REPLACE PARTITION FIELD dt_month WITH days(dt) - """ - ) - - spark.sql( - f""" - INSERT INTO {identifier} VALUES (CAST('2025-01-02' AS date), 2) - """ - ) - - spark.sql( - f""" - DELETE FROM {identifier} WHERE int_data = 1 - """ - ) - - tbl = session_catalog.load_table(identifier) - files_df = tbl.inspect.files() - lhs = files_df.to_pandas()[["file_path", "partition"]].sort_values("file_path", ignore_index=True).reset_index() - rhs = ( - spark.table(f"{identifier}.files") - .select(["file_path", "partition"]) - .toPandas() - .sort_values("file_path", ignore_index=True) - .reset_index() - ) - assert_frame_equal(lhs, rhs, check_dtype=False) diff --git a/tests/integration/test_partition_evolution.py b/tests/integration/test_partition_evolution.py index d489d6a5d0..0e607a46f0 100644 --- a/tests/integration/test_partition_evolution.py +++ b/tests/integration/test_partition_evolution.py @@ -140,14 +140,6 @@ def test_add_hour(catalog: Catalog) -> None: _validate_new_partition_fields(table, 1000, 1, 1000, PartitionField(2, 1000, HourTransform(), "hour_transform")) -@pytest.mark.integration -@pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -def test_add_hour_string_transform(catalog: Catalog) -> None: - table = _table(catalog) - table.update_spec().add_field("event_ts", "hour", "str_hour_transform").commit() - _validate_new_partition_fields(table, 1000, 1, 1000, PartitionField(2, 1000, HourTransform(), "str_hour_transform")) - - @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) def 
test_add_hour_generates_default_name(catalog: Catalog) -> None: diff --git a/tests/integration/test_partitioning_key.py b/tests/integration/test_partitioning_key.py index 1908ec16f3..1066753655 100644 --- a/tests/integration/test_partitioning_key.py +++ b/tests/integration/test_partitioning_key.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. # pylint:disable=redefined-outer-name +import uuid from datetime import date, datetime, timedelta, timezone from decimal import Decimal from typing import Any, List @@ -25,7 +26,7 @@ from pyiceberg.catalog import Catalog from pyiceberg.partitioning import PartitionField, PartitionFieldValue, PartitionKey, PartitionSpec -from pyiceberg.schema import Schema +from pyiceberg.schema import Schema, make_compatible_name from pyiceberg.transforms import ( BucketTransform, DayTransform, @@ -83,7 +84,7 @@ ( [PartitionField(source_id=1, field_id=1001, transform=IdentityTransform(), name="boolean_field")], [False], - Record(False), + Record(boolean_field=False), "boolean_field=false", f"""CREATE TABLE {identifier} ( boolean_field boolean, @@ -102,7 +103,7 @@ ( [PartitionField(source_id=2, field_id=1001, transform=IdentityTransform(), name="string_field")], ["sample_string"], - Record("sample_string"), + Record(string_field="sample_string"), "string_field=sample_string", f"""CREATE TABLE {identifier} ( string_field string, @@ -121,7 +122,7 @@ ( [PartitionField(source_id=4, field_id=1001, transform=IdentityTransform(), name="int_field")], [42], - Record(42), + Record(int_field=42), "int_field=42", f"""CREATE TABLE {identifier} ( int_field int, @@ -140,7 +141,7 @@ ( [PartitionField(source_id=5, field_id=1001, transform=IdentityTransform(), name="long_field")], [1234567890123456789], - Record(1234567890123456789), + Record(long_field=1234567890123456789), "long_field=1234567890123456789", f"""CREATE TABLE {identifier} ( long_field bigint, @@ -159,7 +160,7 @@ ( [PartitionField(source_id=6, field_id=1001, transform=IdentityTransform(), name="float_field")], [3.14], - Record(3.14), + Record(float_field=3.14), "float_field=3.14", # spark writes differently as pyiceberg, Record[float_field=3.140000104904175], path:float_field=3.14 (Record has difference) # so justification (compare expected value with spark behavior) would fail. @@ -182,7 +183,7 @@ ( [PartitionField(source_id=7, field_id=1001, transform=IdentityTransform(), name="double_field")], [6.282], - Record(6.282), + Record(double_field=6.282), "double_field=6.282", # spark writes differently as pyiceberg, Record[double_field=6.2820000648498535] path:double_field=6.282 (Record has difference) # so justification (compare expected value with spark behavior) would fail. 
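The expected Hive-style partition path slices in the hunks that follow (for example timestamp_field=2023-01-01T12%3A00%3A01.000999 and binary_field=ZXhhbXBsZQ%3D%3D) show how partition values are rendered: binary values are base64-encoded and the human-readable value is then URL-escaped. A minimal sketch of that encoding in plain Python, reproducing the strings asserted in these tests rather than calling PyIceberg's own helper:

import base64
from urllib.parse import quote_plus

# Timestamp partition value: colons are percent-encoded, dots and dashes are kept as-is
assert quote_plus("2023-01-01T12:00:01.000999") == "2023-01-01T12%3A00%3A01.000999"

# Binary partition value: base64 first, then the '=' padding is percent-encoded
assert quote_plus(base64.b64encode(b"example").decode()) == "ZXhhbXBsZQ%3D%3D"

# Spaces in string values become '+', as in special%23string%2Bfield=special+string
assert quote_plus("special string") == "special+string"
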
@@ -205,7 +206,7 @@ ( [PartitionField(source_id=8, field_id=1001, transform=IdentityTransform(), name="timestamp_field")], [datetime(2023, 1, 1, 12, 0, 1, 999)], - Record(1672574401000999), + Record(timestamp_field=1672574401000999), "timestamp_field=2023-01-01T12%3A00%3A01.000999", f"""CREATE TABLE {identifier} ( timestamp_field timestamp_ntz, @@ -224,7 +225,7 @@ ( [PartitionField(source_id=8, field_id=1001, transform=IdentityTransform(), name="timestamp_field")], [datetime(2023, 1, 1, 12, 0, 1)], - Record(1672574401000000), + Record(timestamp_field=1672574401000000), "timestamp_field=2023-01-01T12%3A00%3A01", f"""CREATE TABLE {identifier} ( timestamp_field timestamp_ntz, @@ -243,7 +244,7 @@ ( [PartitionField(source_id=8, field_id=1001, transform=IdentityTransform(), name="timestamp_field")], [datetime(2023, 1, 1, 12, 0, 0)], - Record(1672574400000000), + Record(timestamp_field=1672574400000000), "timestamp_field=2023-01-01T12%3A00%3A00", # Spark writes differently as pyiceberg, so justification (compare expected value with spark behavior) would fail # AssertionError: assert 'timestamp_field=2023-01-01T12%3A00%3A00' in 's3://warehouse/default/test_table/data/timestamp_field=2023-01-01T12%3A00/00000-5-f9dca69a-9fb7-4830-9ef6-62d3d7afc09e-00001.parquet' @@ -267,7 +268,7 @@ ( [PartitionField(source_id=9, field_id=1001, transform=IdentityTransform(), name="timestamptz_field")], [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], - Record(1672563601000999), + Record(timestamptz_field=1672563601000999), "timestamptz_field=2023-01-01T09%3A00%3A01.000999%2B00%3A00", # Spark writes differently as pyiceberg, so justification (compare expected value with spark behavior) would fail # AssertionError: assert 'timestamptz_field=2023-01-01T09%3A00%3A01.000999%2B00%3A00' in 's3://warehouse/default/test_table/data/timestamptz_field=2023-01-01T09%3A00%3A01.000999Z/00000-5-b710fc4d-66b6-47f1-b8ae-6208f8aaa2d4-00001.parquet' @@ -291,7 +292,7 @@ ( [PartitionField(source_id=10, field_id=1001, transform=IdentityTransform(), name="date_field")], [date(2023, 1, 1)], - Record(19358), + Record(date_field=19358), "date_field=2023-01-01", f"""CREATE TABLE {identifier} ( date_field date, @@ -307,10 +308,29 @@ (CAST('2023-01-01' AS DATE), 'Associated string value for date 2023-01-01') """, ), + ( + [PartitionField(source_id=14, field_id=1001, transform=IdentityTransform(), name="uuid_field")], + [uuid.UUID("f47ac10b-58cc-4372-a567-0e02b2c3d479")], + Record(uuid_field="f47ac10b-58cc-4372-a567-0e02b2c3d479"), + "uuid_field=f47ac10b-58cc-4372-a567-0e02b2c3d479", + f"""CREATE TABLE {identifier} ( + uuid_field string, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(uuid_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + ('f47ac10b-58cc-4372-a567-0e02b2c3d479', 'Associated string value for UUID f47ac10b-58cc-4372-a567-0e02b2c3d479') + """, + ), ( [PartitionField(source_id=11, field_id=1001, transform=IdentityTransform(), name="binary_field")], [b"example"], - Record(b"example"), + Record(binary_field=b"example"), "binary_field=ZXhhbXBsZQ%3D%3D", f"""CREATE TABLE {identifier} ( binary_field binary, @@ -329,7 +349,7 @@ ( [PartitionField(source_id=13, field_id=1001, transform=IdentityTransform(), name="decimal_field")], [Decimal("123.45")], - Record(Decimal("123.45")), + Record(decimal_field=Decimal("123.45")), "decimal_field=123.45", f"""CREATE TABLE {identifier} ( decimal_field decimal(5,2), @@ -350,7 +370,7 @@ ( [PartitionField(source_id=8, field_id=1001, 
transform=MonthTransform(), name="timestamp_field_month")], [datetime(2023, 1, 1, 11, 55, 59, 999999)], - Record((2023 - 1970) * 12), + Record(timestamp_field_month=((2023 - 1970) * 12)), "timestamp_field_month=2023-01", f"""CREATE TABLE {identifier} ( timestamp_field timestamp_ntz, @@ -369,7 +389,7 @@ ( [PartitionField(source_id=9, field_id=1001, transform=MonthTransform(), name="timestamptz_field_month")], [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], - Record((2023 - 1970) * 12 + 1 - 1), + Record(timestamptz_field_month=((2023 - 1970) * 12 + 1 - 1)), "timestamptz_field_month=2023-01", f"""CREATE TABLE {identifier} ( timestamptz_field timestamp, @@ -388,7 +408,7 @@ ( [PartitionField(source_id=10, field_id=1001, transform=MonthTransform(), name="date_field_month")], [date(2023, 1, 1)], - Record((2023 - 1970) * 12), + Record(date_field_month=((2023 - 1970) * 12)), "date_field_month=2023-01", f"""CREATE TABLE {identifier} ( date_field date, @@ -408,7 +428,7 @@ ( [PartitionField(source_id=8, field_id=1001, transform=YearTransform(), name="timestamp_field_year")], [datetime(2023, 1, 1, 11, 55, 59, 999999)], - Record(2023 - 1970), + Record(timestamp_field_year=(2023 - 1970)), "timestamp_field_year=2023", f"""CREATE TABLE {identifier} ( timestamp_field timestamp, @@ -427,7 +447,7 @@ ( [PartitionField(source_id=9, field_id=1001, transform=YearTransform(), name="timestamptz_field_year")], [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], - Record(53), + Record(timestamptz_field_year=53), "timestamptz_field_year=2023", f"""CREATE TABLE {identifier} ( timestamptz_field timestamp, @@ -446,7 +466,7 @@ ( [PartitionField(source_id=10, field_id=1001, transform=YearTransform(), name="date_field_year")], [date(2023, 1, 1)], - Record(2023 - 1970), + Record(date_field_year=(2023 - 1970)), "date_field_year=2023", f"""CREATE TABLE {identifier} ( date_field date, @@ -466,7 +486,7 @@ ( [PartitionField(source_id=8, field_id=1001, transform=DayTransform(), name="timestamp_field_day")], [datetime(2023, 1, 1, 11, 55, 59, 999999)], - Record(19358), + Record(timestamp_field_day=19358), "timestamp_field_day=2023-01-01", f"""CREATE TABLE {identifier} ( timestamp_field timestamp, @@ -485,7 +505,7 @@ ( [PartitionField(source_id=9, field_id=1001, transform=DayTransform(), name="timestamptz_field_day")], [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], - Record(19358), + Record(timestamptz_field_day=19358), "timestamptz_field_day=2023-01-01", f"""CREATE TABLE {identifier} ( timestamptz_field timestamp, @@ -504,7 +524,7 @@ ( [PartitionField(source_id=10, field_id=1001, transform=DayTransform(), name="date_field_day")], [date(2023, 1, 1)], - Record(19358), + Record(date_field_day=19358), "date_field_day=2023-01-01", f"""CREATE TABLE {identifier} ( date_field date, @@ -524,7 +544,7 @@ ( [PartitionField(source_id=8, field_id=1001, transform=HourTransform(), name="timestamp_field_hour")], [datetime(2023, 1, 1, 11, 55, 59, 999999)], - Record(464603), + Record(timestamp_field_hour=464603), "timestamp_field_hour=2023-01-01-11", f"""CREATE TABLE {identifier} ( timestamp_field timestamp, @@ -543,7 +563,7 @@ ( [PartitionField(source_id=9, field_id=1001, transform=HourTransform(), name="timestamptz_field_hour")], [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], - Record(464601), + Record(timestamptz_field_hour=464601), "timestamptz_field_hour=2023-01-01-09", f"""CREATE TABLE {identifier} ( timestamptz_field timestamp, @@ 
-563,7 +583,7 @@ ( [PartitionField(source_id=4, field_id=1001, transform=TruncateTransform(10), name="int_field_trunc")], [12345], - Record(12340), + Record(int_field_trunc=12340), "int_field_trunc=12340", f"""CREATE TABLE {identifier} ( int_field int, @@ -582,7 +602,7 @@ ( [PartitionField(source_id=5, field_id=1001, transform=TruncateTransform(2), name="bigint_field_trunc")], [2**32 + 1], - Record(2**32), # 4294967296 + Record(bigint_field_trunc=2**32), # 4294967296 "bigint_field_trunc=4294967296", f"""CREATE TABLE {identifier} ( bigint_field bigint, @@ -601,7 +621,7 @@ ( [PartitionField(source_id=2, field_id=1001, transform=TruncateTransform(3), name="string_field_trunc")], ["abcdefg"], - Record("abc"), + Record(string_field_trunc="abc"), "string_field_trunc=abc", f"""CREATE TABLE {identifier} ( string_field string, @@ -620,7 +640,7 @@ ( [PartitionField(source_id=13, field_id=1001, transform=TruncateTransform(width=5), name="decimal_field_trunc")], [Decimal("678.93")], - Record(Decimal("678.90")), + Record(decimal_field_trunc=Decimal("678.90")), "decimal_field_trunc=678.90", # Assuming truncation width of 1 leads to truncating to 670 f"""CREATE TABLE {identifier} ( decimal_field decimal(5,2), @@ -639,7 +659,7 @@ ( [PartitionField(source_id=11, field_id=1001, transform=TruncateTransform(10), name="binary_field_trunc")], [b"HELLOICEBERG"], - Record(b"HELLOICEBE"), + Record(binary_field_trunc=b"HELLOICEBE"), "binary_field_trunc=SEVMTE9JQ0VCRQ%3D%3D", f"""CREATE TABLE {identifier} ( binary_field binary, @@ -659,7 +679,7 @@ ( [PartitionField(source_id=4, field_id=1001, transform=BucketTransform(2), name="int_field_bucket")], [10], - Record(0), + Record(int_field_bucket=0), "int_field_bucket=0", f"""CREATE TABLE {identifier} ( int_field int, @@ -685,7 +705,7 @@ datetime(2023, 1, 1, 11, 55, 59, 999999), date(2023, 1, 1), ], - Record(53, 19358), + Record(timestamp_field_year=53, date_field_day=19358), "timestamp_field_year=2023/date_field_day=2023-01-01", f"""CREATE TABLE {identifier} ( timestamp_field timestamp, @@ -707,7 +727,7 @@ ( [PartitionField(source_id=15, field_id=1001, transform=IdentityTransform(), name="special#string+field")], ["special string"], - Record("special string"), + Record(**{"special#string+field": "special string"}), # type: ignore "special%23string%2Bfield=special+string", f"""CREATE TABLE {identifier} ( `special#string+field` string @@ -772,5 +792,6 @@ def test_partition_key( snapshot.manifests(iceberg_table.io)[0].fetch_manifest_entry(iceberg_table.io)[0].data_file.file_path ) # Special characters in partition value are sanitized when written to the data file's partition field - assert spark_partition_for_justification == expected_partition_record + sanitized_record = Record(**{make_compatible_name(k): v for k, v in vars(expected_partition_record).items()}) + assert spark_partition_for_justification == sanitized_record assert expected_hive_partition_path_slice in spark_path_for_justification diff --git a/tests/integration/test_reads.py b/tests/integration/test_reads.py index a33b1a36bc..ee5f8a2574 100644 --- a/tests/integration/test_reads.py +++ b/tests/integration/test_reads.py @@ -29,7 +29,6 @@ from hive_metastore.ttypes import LockRequest, LockResponse, LockState, UnlockRequest from pyarrow.fs import S3FileSystem from pydantic_core import ValidationError -from pyspark.sql import SparkSession from pyiceberg.catalog import Catalog from pyiceberg.catalog.hive import HiveCatalog, _HiveClient @@ -42,7 +41,6 @@ LessThan, NotEqualTo, NotNaN, - NotNull, ) from pyiceberg.io 
import PYARROW_USE_LARGE_TYPES_ON_READ from pyiceberg.io.pyarrow import ( @@ -113,27 +111,6 @@ def test_table_properties(catalog: Catalog) -> None: assert "None type is not a supported value in properties: property_name" in str(exc_info.value) -@pytest.mark.integration -@pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive")]) -def test_hive_properties(catalog: Catalog) -> None: - table = create_table(catalog) - table.transaction().set_properties({"abc": "def", "p1": "123"}).commit_transaction() - - hive_client: _HiveClient = _HiveClient(catalog.properties["uri"]) - - with hive_client as open_client: - hive_table = open_client.get_table(*TABLE_NAME) - assert hive_table.parameters.get("abc") == "def" - assert hive_table.parameters.get("p1") == "123" - assert hive_table.parameters.get("not_exist_parameter") is None - - table.transaction().remove_properties("abc").commit_transaction() - - with hive_client as open_client: - hive_table = open_client.get_table(*TABLE_NAME) - assert hive_table.parameters.get("abc") is None - - @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) def test_table_properties_dict(catalog: Catalog) -> None: @@ -321,6 +298,9 @@ def test_pyarrow_limit_with_multiple_files(catalog: Catalog) -> None: @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) def test_daft_nan(catalog: Catalog) -> None: + import daft + + daft.context.set_runner_native() table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") df = table_test_null_nan_rewritten.to_daft() assert df.count_rows() == 3 @@ -330,6 +310,9 @@ def test_daft_nan(catalog: Catalog) -> None: @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) def test_daft_nan_rewritten(catalog: Catalog) -> None: + import daft + + daft.context.set_runner_native() table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") df = table_test_null_nan_rewritten.to_daft() df = df.where(df["col_numeric"].float.is_nan()) @@ -339,20 +322,6 @@ def test_daft_nan_rewritten(catalog: Catalog) -> None: assert math.isnan(df.to_pydict()["col_numeric"][0]) -@pytest.mark.integration -@pytest.mark.filterwarnings("ignore") -@pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -def test_bodo_nan(catalog: Catalog, monkeypatch: pytest.MonkeyPatch) -> None: - # Avoid local Mac issues (see https://github.com/apache/iceberg-python/issues/2225) - monkeypatch.setenv("BODO_DATAFRAME_LIBRARY_RUN_PARALLEL", "0") - monkeypatch.setenv("FI_PROVIDER", "tcp") - - table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") - df = table_test_null_nan_rewritten.to_bodo() - assert len(df) == 3 - assert math.isnan(df.col_numeric.iloc[0]) - - @pytest.mark.integration @pytest.mark.filterwarnings("ignore") @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) @@ -415,8 +384,7 @@ def test_pyarrow_to_iceberg_all_types(catalog: Catalog) -> None: @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -@pytest.mark.parametrize("format_version", [2, 3]) -def 
test_pyarrow_deletes(catalog: Catalog, format_version: int) -> None: +def test_pyarrow_deletes(catalog: Catalog) -> None: # number, letter # (1, 'a'), # (2, 'b'), @@ -430,7 +398,7 @@ def test_pyarrow_deletes(catalog: Catalog, format_version: int) -> None: # (10, 'j'), # (11, 'k'), # (12, 'l') - test_positional_mor_deletes = catalog.load_table(f"default.test_positional_mor_deletes_v{format_version}") + test_positional_mor_deletes = catalog.load_table("default.test_positional_mor_deletes") arrow_table = test_positional_mor_deletes.scan().to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12] @@ -453,8 +421,7 @@ def test_pyarrow_deletes(catalog: Catalog, format_version: int) -> None: @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -@pytest.mark.parametrize("format_version", [2, 3]) -def test_pyarrow_deletes_double(catalog: Catalog, format_version: int) -> None: +def test_pyarrow_deletes_double(catalog: Catalog) -> None: # number, letter # (1, 'a'), # (2, 'b'), @@ -468,7 +435,7 @@ def test_pyarrow_deletes_double(catalog: Catalog, format_version: int) -> None: # (10, 'j'), # (11, 'k'), # (12, 'l') - test_positional_mor_double_deletes = catalog.load_table(f"default.test_positional_mor_double_deletes_v{format_version}") + test_positional_mor_double_deletes = catalog.load_table("default.test_positional_mor_double_deletes") arrow_table = test_positional_mor_double_deletes.scan().to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 7, 8, 10, 11, 12] @@ -491,8 +458,7 @@ def test_pyarrow_deletes_double(catalog: Catalog, format_version: int) -> None: @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -@pytest.mark.parametrize("format_version", [2, 3]) -def test_pyarrow_batches_deletes(catalog: Catalog, format_version: int) -> None: +def test_pyarrow_batches_deletes(catalog: Catalog) -> None: # number, letter # (1, 'a'), # (2, 'b'), @@ -506,7 +472,7 @@ def test_pyarrow_batches_deletes(catalog: Catalog, format_version: int) -> None: # (10, 'j'), # (11, 'k'), # (12, 'l') - test_positional_mor_deletes = catalog.load_table(f"default.test_positional_mor_deletes_v{format_version}") + test_positional_mor_deletes = catalog.load_table("default.test_positional_mor_deletes") arrow_table = test_positional_mor_deletes.scan().to_arrow_batch_reader().read_all() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12] @@ -533,8 +499,7 @@ def test_pyarrow_batches_deletes(catalog: Catalog, format_version: int) -> None: @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -@pytest.mark.parametrize("format_version", [2, 3]) -def test_pyarrow_batches_deletes_double(catalog: Catalog, format_version: int) -> None: +def test_pyarrow_batches_deletes_double(catalog: Catalog) -> None: # number, letter # (1, 'a'), # (2, 'b'), @@ -548,7 +513,7 @@ def test_pyarrow_batches_deletes_double(catalog: Catalog, format_version: int) - # (10, 'j'), # (11, 'k'), # (12, 'l') - test_positional_mor_double_deletes = catalog.load_table(f"default.test_positional_mor_double_deletes_v{format_version}") + test_positional_mor_double_deletes = catalog.load_table("default.test_positional_mor_double_deletes") arrow_table = 
test_positional_mor_double_deletes.scan().to_arrow_batch_reader().read_all() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 7, 8, 10, 11, 12] @@ -597,15 +562,15 @@ def test_partitioned_tables(catalog: Catalog) -> None: def test_unpartitioned_uuid_table(catalog: Catalog) -> None: unpartitioned_uuid = catalog.load_table("default.test_uuid_and_fixed_unpartitioned") arrow_table_eq = unpartitioned_uuid.scan(row_filter="uuid_col == '102cb62f-e6f8-4eb0-9973-d9b012ff0967'").to_arrow() - assert arrow_table_eq["uuid_col"].to_pylist() == [uuid.UUID("102cb62f-e6f8-4eb0-9973-d9b012ff0967")] + assert arrow_table_eq["uuid_col"].to_pylist() == [uuid.UUID("102cb62f-e6f8-4eb0-9973-d9b012ff0967").bytes] arrow_table_neq = unpartitioned_uuid.scan( row_filter="uuid_col != '102cb62f-e6f8-4eb0-9973-d9b012ff0967' and uuid_col != '639cccce-c9d2-494a-a78c-278ab234f024'" ).to_arrow() assert arrow_table_neq["uuid_col"].to_pylist() == [ - uuid.UUID("ec33e4b2-a834-4cc3-8c4a-a1d3bfc2f226"), - uuid.UUID("c1b0d8e0-0b0e-4b1e-9b0a-0e0b0d0c0a0b"), - uuid.UUID("923dae77-83d6-47cd-b4b0-d383e64ee57e"), + uuid.UUID("ec33e4b2-a834-4cc3-8c4a-a1d3bfc2f226").bytes, + uuid.UUID("c1b0d8e0-0b0e-4b1e-9b0a-0e0b0d0c0a0b").bytes, + uuid.UUID("923dae77-83d6-47cd-b4b0-d383e64ee57e").bytes, ] @@ -630,18 +595,16 @@ def test_unpartitioned_fixed_table(catalog: Catalog) -> None: @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -@pytest.mark.parametrize("format_version", [2, 3]) -def test_scan_tag(catalog: Catalog, format_version: int) -> None: - test_positional_mor_deletes = catalog.load_table(f"default.test_positional_mor_deletes_v{format_version}") +def test_scan_tag(catalog: Catalog) -> None: + test_positional_mor_deletes = catalog.load_table("default.test_positional_mor_deletes") arrow_table = test_positional_mor_deletes.scan().use_ref("tag_12").to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -@pytest.mark.parametrize("format_version", [2, 3]) -def test_scan_branch(catalog: Catalog, format_version: int) -> None: - test_positional_mor_deletes = catalog.load_table(f"default.test_positional_mor_deletes_v{format_version}") +def test_scan_branch(catalog: Catalog) -> None: + test_positional_mor_deletes = catalog.load_table("default.test_positional_mor_deletes") arrow_table = test_positional_mor_deletes.scan().use_ref("without_5").to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12] @@ -704,24 +667,6 @@ def test_filter_case_insensitive(catalog: Catalog) -> None: assert arrow_table["b"].to_pylist() == ["2"] -@pytest.mark.integration -@pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -def test_filters_on_top_level_struct(catalog: Catalog) -> None: - test_empty_struct = catalog.load_table("default.test_table_empty_list_and_map") - - arrow_table = test_empty_struct.scan().to_arrow() - assert None in arrow_table["col_struct"].to_pylist() - - arrow_table = test_empty_struct.scan(row_filter=NotNull("col_struct")).to_arrow() - assert arrow_table["col_struct"].to_pylist() == [{"test": 1}] - - arrow_table = test_empty_struct.scan(row_filter="col_struct is not null", case_sensitive=False).to_arrow() - assert arrow_table["col_struct"].to_pylist() == 
[{"test": 1}] - - arrow_table = test_empty_struct.scan(row_filter="COL_STRUCT is null", case_sensitive=False).to_arrow() - assert arrow_table["col_struct"].to_pylist() == [None] - - @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) def test_upgrade_table_version(catalog: Catalog) -> None: @@ -861,16 +806,7 @@ def test_configure_row_group_batch_size(session_catalog: Catalog) -> None: @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -def test_table_scan_keep_types(catalog: Catalog) -> None: - expected_schema = pa.schema( - [ - pa.field("string", pa.string()), - pa.field("string-to-binary", pa.large_binary()), - pa.field("binary", pa.binary()), - pa.field("list", pa.list_(pa.large_string())), - ] - ) - +def test_table_scan_default_to_large_types(catalog: Catalog) -> None: identifier = "default.test_table_scan_default_to_large_types" arrow_table = pa.Table.from_arrays( [ @@ -879,7 +815,7 @@ def test_table_scan_keep_types(catalog: Catalog) -> None: pa.array([b"a", b"b", b"c"]), pa.array([["a", "b"], ["c", "d"], ["e", "f"]]), ], - schema=expected_schema, + names=["string", "string-to-binary", "binary", "list"], ) try: @@ -898,6 +834,15 @@ def test_table_scan_keep_types(catalog: Catalog) -> None: update_schema.update_column("string-to-binary", BinaryType()) result_table = tbl.scan().to_arrow() + + expected_schema = pa.schema( + [ + pa.field("string", pa.large_string()), + pa.field("string-to-binary", pa.large_binary()), + pa.field("binary", pa.large_binary()), + pa.field("list", pa.large_list(pa.large_string())), + ] + ) assert result_table.schema.equals(expected_schema) @@ -936,7 +881,7 @@ def test_table_scan_override_with_small_types(catalog: Catalog) -> None: expected_schema = pa.schema( [ pa.field("string", pa.string()), - pa.field("string-to-binary", pa.large_binary()), + pa.field("string-to-binary", pa.binary()), pa.field("binary", pa.binary()), pa.field("list", pa.list_(pa.string())), ] @@ -1033,44 +978,3 @@ def test_scan_with_datetime(catalog: Catalog) -> None: df = table.scan(row_filter=LessThan("datetime", yesterday)).to_pandas() assert len(df) == 0 - - -@pytest.mark.integration -# TODO: For Hive we require writing V3 -# @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -@pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog")]) -def test_initial_default(catalog: Catalog, spark: SparkSession) -> None: - identifier = "default.test_initial_default" - try: - catalog.drop_table(identifier) - except NoSuchTableError: - pass - - one_column = pa.table([pa.nulls(10, pa.int32())], names=["some_field"]) - - tbl = catalog.create_table(identifier, schema=one_column.schema, properties={"format-version": "2"}) - - tbl.append(one_column) - - # Do the bump version through Spark, since PyIceberg does not support this (yet) - spark.sql(f"ALTER TABLE {identifier} SET TBLPROPERTIES('format-version'='3')") - - with tbl.update_schema() as upd: - upd.add_column("so_true", BooleanType(), required=False, default_value=True) - - result_table = tbl.scan().filter("so_true == True").to_arrow() - - assert len(result_table) == 10 - - -@pytest.mark.integration -@pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) -def test_filter_after_arrow_scan(catalog: Catalog) 
-> None: - identifier = "test_partitioned_by_hours" - table = catalog.load_table(f"default.{identifier}") - - scan = table.scan() - assert len(scan.to_arrow()) > 0 - - scan = scan.filter("ts >= '2023-03-05T00:00:00+00:00'") - assert len(scan.to_arrow()) > 0 diff --git a/tests/integration/test_rest_manifest.py b/tests/integration/test_rest_manifest.py index 8dd9510ac8..82c41cfd93 100644 --- a/tests/integration/test_rest_manifest.py +++ b/tests/integration/test_rest_manifest.py @@ -20,25 +20,21 @@ from copy import copy from enum import Enum from tempfile import TemporaryDirectory -from typing import Any, List +from typing import Any import pytest from fastavro import reader -from pyiceberg.avro.codecs import AvroCompressionCodec from pyiceberg.catalog import Catalog, load_catalog from pyiceberg.io.pyarrow import PyArrowFileIO from pyiceberg.manifest import DataFile, write_manifest from pyiceberg.table import Table -from pyiceberg.typedef import Record from pyiceberg.utils.lazydict import LazyDict # helper function to serialize our objects to dicts to enable # direct comparison with the dicts returned by fastavro -def todict(obj: Any, spec_keys: List[str]) -> Any: - if type(obj) is Record: - return {key: obj[pos] for key, pos in zip(spec_keys, range(len(obj)))} +def todict(obj: Any) -> Any: if isinstance(obj, dict) or isinstance(obj, LazyDict): data = [] for k, v in obj.items(): @@ -47,13 +43,9 @@ def todict(obj: Any, spec_keys: List[str]) -> Any: elif isinstance(obj, Enum): return obj.value elif hasattr(obj, "__iter__") and not isinstance(obj, str) and not isinstance(obj, bytes): - return [todict(v, spec_keys) for v in obj] + return [todict(v) for v in obj] elif hasattr(obj, "__dict__"): - return { - key: todict(value, spec_keys) - for key, value in inspect.getmembers(obj) - if not callable(value) and not key.startswith("_") - } + return {key: todict(value) for key, value in inspect.getmembers(obj) if not callable(value) and not key.startswith("_")} else: return obj @@ -78,8 +70,7 @@ def table_test_all_types(catalog: Catalog) -> Table: @pytest.mark.integration -@pytest.mark.parametrize("compression", ["null", "deflate"]) -def test_write_sample_manifest(table_test_all_types: Table, compression: AvroCompressionCodec) -> None: +def test_write_sample_manifest(table_test_all_types: Table) -> None: test_snapshot = table_test_all_types.current_snapshot() if test_snapshot is None: raise ValueError("Table has no current snapshot, check the docker environment") @@ -89,7 +80,7 @@ def test_write_sample_manifest(table_test_all_types: Table, compression: AvroCom entry = test_manifest_entries[0] test_schema = table_test_all_types.schema() test_spec = table_test_all_types.spec() - wrapped_data_file_v2_debug = DataFile.from_args( + wrapped_data_file_v2_debug = DataFile( format_version=2, content=entry.data_file.content, file_path=entry.data_file.file_path, @@ -111,7 +102,9 @@ def test_write_sample_manifest(table_test_all_types: Table, compression: AvroCom ) wrapped_entry_v2 = copy(entry) wrapped_entry_v2.data_file = wrapped_data_file_v2_debug - wrapped_entry_v2_dict = todict(wrapped_entry_v2, [field.name for field in test_spec.fields]) + wrapped_entry_v2_dict = todict(wrapped_entry_v2) + # This one should not be written + del wrapped_entry_v2_dict["data_file"]["spec_id"] with TemporaryDirectory() as tmpdir: tmp_avro_file = tmpdir + "/test_write_manifest.avro" @@ -122,7 +115,6 @@ def test_write_sample_manifest(table_test_all_types: Table, compression: AvroCom schema=test_schema, output_file=output, 
snapshot_id=test_snapshot.snapshot_id, - avro_compression=compression, ) as manifest_writer: # For simplicity, try one entry first manifest_writer.add_entry(test_manifest_entries[0]) diff --git a/tests/integration/test_rest_schema.py b/tests/integration/test_rest_schema.py index 4462da1c8c..6a704839e2 100644 --- a/tests/integration/test_rest_schema.py +++ b/tests/integration/test_rest_schema.py @@ -15,10 +15,6 @@ # specific language governing permissions and limitations # under the License. # pylint:disable=redefined-outer-name -from datetime import date, datetime, time, timezone -from decimal import Decimal -from typing import Any -from uuid import UUID import pytest @@ -31,7 +27,6 @@ from pyiceberg.table.sorting import SortField, SortOrder from pyiceberg.table.update.schema import UpdateSchema from pyiceberg.transforms import IdentityTransform -from pyiceberg.typedef import EMPTY_DICT, Properties from pyiceberg.types import ( BinaryType, BooleanType, @@ -74,7 +69,7 @@ def simple_table(catalog: Catalog, table_schema_simple: Schema) -> Table: return _create_table_with_schema(catalog, table_schema_simple) -def _create_table_with_schema(catalog: Catalog, schema: Schema, properties: Properties = EMPTY_DICT) -> Table: +def _create_table_with_schema(catalog: Catalog, schema: Schema) -> Table: tbl_name = "default.test_schema_evolution" try: catalog.drop_table(tbl_name) @@ -83,7 +78,7 @@ def _create_table_with_schema(catalog: Catalog, schema: Schema, properties: Prop return catalog.create_table( identifier=tbl_name, schema=schema, - properties={TableProperties.DEFAULT_NAME_MAPPING: create_mapping_from_schema(schema).model_dump_json(), **properties}, + properties={TableProperties.DEFAULT_NAME_MAPPING: create_mapping_from_schema(schema).model_dump_json()}, ) @@ -159,7 +154,7 @@ def test_schema_evolution_via_transaction(catalog: Catalog) -> None: NestedField(field_id=4, name="col_integer", field_type=IntegerType(), required=False), ) - with pytest.raises(CommitFailedException, match="Requirement failed: current schema id has changed: expected 2, found 3"): + with pytest.raises(CommitFailedException) as exc_info: with tbl.transaction() as tx: # Start a new update schema_update = tx.update_schema() @@ -170,6 +165,8 @@ def test_schema_evolution_via_transaction(catalog: Catalog) -> None: # stage another update in the transaction schema_update.add_column("col_double", DoubleType()).commit() + assert "Requirement failed: current schema changed: expected id 2 != 3" in str(exc_info.value) + assert tbl.schema() == Schema( NestedField(field_id=1, name="col_uuid", field_type=UUIDType(), required=False), NestedField(field_id=2, name="col_fixed", field_type=FixedType(25), required=False), @@ -1079,8 +1076,9 @@ def test_add_required_column(catalog: Catalog) -> None: schema_ = Schema(NestedField(field_id=1, name="a", field_type=BooleanType(), required=False)) table = _create_table_with_schema(catalog, schema_) update = table.update_schema() - with pytest.raises(ValueError, match="Incompatible change: cannot add required column: data"): + with pytest.raises(ValueError) as exc_info: update.add_column(path="data", field_type=IntegerType(), required=True) + assert "Incompatible change: cannot add required column: data" in str(exc_info.value) new_schema = ( UpdateSchema(transaction=table.transaction(), allow_incompatible_changes=True) @@ -1093,102 +1091,16 @@ def test_add_required_column(catalog: Catalog) -> None: ) -@pytest.mark.integration -@pytest.mark.parametrize( - "iceberg_type, initial_default, write_default", - 
[ - (BooleanType(), True, False), - (IntegerType(), 123, 456), - (LongType(), 123, 456), - (FloatType(), 19.25, 22.27), - (DoubleType(), 19.25, 22.27), - (DecimalType(10, 2), Decimal("19.25"), Decimal("22.27")), - (DecimalType(10, 2), Decimal("19.25"), Decimal("22.27")), - (StringType(), "abc", "def"), - (DateType(), date(1990, 3, 1), date(1991, 3, 1)), - (TimeType(), time(19, 25, 22), time(22, 25, 22)), - (TimestampType(), datetime(1990, 5, 1, 22, 1, 1), datetime(2000, 5, 1, 22, 1, 1)), - ( - TimestamptzType(), - datetime(1990, 5, 1, 22, 1, 1, tzinfo=timezone.utc), - datetime(2000, 5, 1, 22, 1, 1, tzinfo=timezone.utc), - ), - (BinaryType(), b"123", b"456"), - (FixedType(4), b"1234", b"5678"), - (UUIDType(), UUID(int=0x12345678123456781234567812345678), UUID(int=0x32145678123456781234567812345678)), - ], -) -def test_initial_default_all_columns( - catalog: Catalog, iceberg_type: PrimitiveType, initial_default: Any, write_default: Any -) -> None: - # Round trips all the types through the rest catalog to check the serialization - table = _create_table_with_schema(catalog, Schema(), properties={TableProperties.FORMAT_VERSION: 3}) - - tx = table.update_schema() - tx.add_column(path="data", field_type=iceberg_type, required=True, default_value=initial_default) - tx.add_column(path="nested", field_type=StructType(), required=False) - tx.commit() - - tx = table.update_schema() - tx.add_column(path=("nested", "data"), field_type=iceberg_type, required=True, default_value=initial_default) - tx.commit() - - for field_id in [1, 3]: - field = table.schema().find_field(field_id) - assert field.initial_default == initial_default - assert field.write_default == initial_default - - with table.update_schema() as tx: - tx.set_default_value("data", write_default) - tx.set_default_value(("nested", "data"), write_default) - - for field_id in [1, 3]: - field = table.schema().find_field(field_id) - assert field.initial_default == initial_default - assert field.write_default == write_default - - -@pytest.mark.integration -def test_add_required_column_initial_default(catalog: Catalog) -> None: - schema_ = Schema(NestedField(field_id=1, name="a", field_type=BooleanType(), required=False)) - table = _create_table_with_schema(catalog, schema_, properties={TableProperties.FORMAT_VERSION: 3}) - - table.update_schema().add_column(path="data", field_type=IntegerType(), required=True, default_value=22).commit() - - assert table.schema() == Schema( - NestedField(field_id=1, name="a", field_type=BooleanType(), required=False), - NestedField(field_id=2, name="data", field_type=IntegerType(), required=True, initial_default=22, write_default=22), - schema_id=1, - ) - - # Update - table.update_schema().update_column(path="data", field_type=LongType()).rename_column("a", "bool").commit() - - assert table.schema() == Schema( - NestedField(field_id=1, name="bool", field_type=BooleanType(), required=False), - NestedField(field_id=2, name="data", field_type=LongType(), required=True, initial_default=22, write_default=22), - schema_id=1, - ) - - -@pytest.mark.integration -def test_add_required_column_initial_default_invalid_value(catalog: Catalog) -> None: - schema_ = Schema(NestedField(field_id=1, name="a", field_type=BooleanType(), required=False)) - table = _create_table_with_schema(catalog, schema_) - update = table.update_schema() - with pytest.raises(ValueError, match="Invalid default value: Could not convert abc into a int"): - update.add_column(path="data", field_type=IntegerType(), required=True, default_value="abc") - - 
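The surrounding hunks in this file swap the pytest.raises(..., match=...) form back to capturing the exception and asserting on its message afterwards. Both idioms check the same condition; a minimal, self-contained sketch (add_duplicate_column is a hypothetical stand-in for the schema-update calls in these tests):

    import pytest

    def add_duplicate_column() -> None:
        # Hypothetical stand-in that fails the same way the schema update does
        raise ValueError("Cannot add column, name already exists: ID")

    # match= form: the string is treated as a regex searched in the message
    with pytest.raises(ValueError, match="already exists: ID"):
        add_duplicate_column()

    # Restored form: capture the exception and assert on its text afterwards
    with pytest.raises(ValueError) as exc_info:
        add_duplicate_column()
    assert "already exists: ID" in str(exc_info.value)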
@pytest.mark.integration def test_add_required_column_case_insensitive(catalog: Catalog) -> None: schema_ = Schema(NestedField(field_id=1, name="id", field_type=BooleanType(), required=False)) table = _create_table_with_schema(catalog, schema_) - with pytest.raises(ValueError, match="already exists: ID"): + with pytest.raises(ValueError) as exc_info: with table.transaction() as txn: with txn.update_schema(allow_incompatible_changes=True) as update: update.case_sensitive(False).add_column(path="ID", field_type=IntegerType(), required=True) + assert "already exists: ID" in str(exc_info.value) new_schema = ( UpdateSchema(transaction=table.transaction(), allow_incompatible_changes=True) diff --git a/tests/integration/test_statistics_operations.py b/tests/integration/test_statistics_operations.py index 09273768d9..a7b4e38802 100644 --- a/tests/integration/test_statistics_operations.py +++ b/tests/integration/test_statistics_operations.py @@ -82,10 +82,3 @@ def create_statistics_file(snapshot_id: int, type_name: str) -> StatisticsFile: update.remove_statistics(add_snapshot_id_1) assert len(tbl.metadata.statistics) == 1 - - with tbl.transaction() as txn: - with txn.update_statistics() as update: - update.set_statistics(statistics_file_snap_1) - update.set_statistics(statistics_file_snap_2) - - assert len(tbl.metadata.statistics) == 2 diff --git a/tests/integration/test_writes/test_optimistic_concurrency.py b/tests/integration/test_writes/test_optimistic_concurrency.py deleted file mode 100644 index 6ddf4c11d5..0000000000 --- a/tests/integration/test_writes/test_optimistic_concurrency.py +++ /dev/null @@ -1,93 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import pyarrow as pa -import pytest -from pyspark.sql import SparkSession - -from pyiceberg.catalog import Catalog -from pyiceberg.exceptions import CommitFailedException -from utils import _create_table - - -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2]) -def test_conflict_delete_delete( - spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int -) -> None: - """This test should start passing once optimistic concurrency control has been implemented.""" - identifier = "default.test_conflict" - tbl1 = _create_table(session_catalog, identifier, {"format-version": format_version}, [arrow_table_with_null]) - tbl2 = session_catalog.load_table(identifier) - - tbl1.delete("string == 'z'") - - with pytest.raises(CommitFailedException, match="(branch main has changed: expected id ).*"): - # tbl2 isn't aware of the commit by tbl1 - tbl2.delete("string == 'z'") - - -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2]) -def test_conflict_delete_append( - spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int -) -> None: - """This test should start passing once optimistic concurrency control has been implemented.""" - identifier = "default.test_conflict" - tbl1 = _create_table(session_catalog, identifier, {"format-version": format_version}, [arrow_table_with_null]) - tbl2 = session_catalog.load_table(identifier) - - # This is allowed - tbl1.delete("string == 'z'") - - with pytest.raises(CommitFailedException, match="(branch main has changed: expected id ).*"): - # tbl2 isn't aware of the commit by tbl1 - tbl2.append(arrow_table_with_null) - - -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2]) -def test_conflict_append_delete( - spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int -) -> None: - """This test should start passing once optimistic concurrency control has been implemented.""" - identifier = "default.test_conflict" - tbl1 = _create_table(session_catalog, identifier, {"format-version": format_version}, [arrow_table_with_null]) - tbl2 = session_catalog.load_table(identifier) - - tbl1.append(arrow_table_with_null) - - with pytest.raises(CommitFailedException, match="(branch main has changed: expected id ).*"): - # tbl2 isn't aware of the commit by tbl1 - tbl2.delete("string == 'z'") - - -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2]) -def test_conflict_append_append( - spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int -) -> None: - """This test should start passing once optimistic concurrency control has been implemented.""" - identifier = "default.test_conflict" - tbl1 = _create_table(session_catalog, identifier, {"format-version": format_version}, [arrow_table_with_null]) - tbl2 = session_catalog.load_table(identifier) - - tbl1.append(arrow_table_with_null) - - with pytest.raises(CommitFailedException, match="(branch main has changed: expected id ).*"): - # tbl2 isn't aware of the commit by tbl1 - tbl2.append(arrow_table_with_null) diff --git a/tests/integration/test_writes/test_partitioned_writes.py b/tests/integration/test_writes/test_partitioned_writes.py index e9698067c1..1e6ea1b797 100644 --- a/tests/integration/test_writes/test_partitioned_writes.py +++ b/tests/integration/test_writes/test_partitioned_writes.py @@ -294,14 +294,13 @@ def 
test_object_storage_location_provider_excludes_partition_path( PartitionField(source_id=nested_field.field_id, field_id=1001, transform=IdentityTransform(), name=part_col) ) - # Enable `write.object-storage.enabled` which is False by default - # `write.object-storage.partitioned-paths` is True by default - assert TableProperties.OBJECT_STORE_ENABLED_DEFAULT is False - assert TableProperties.WRITE_OBJECT_STORE_PARTITIONED_PATHS_DEFAULT is True + # write.object-storage.enabled and write.object-storage.partitioned-paths don't need to be specified as they're on by default + assert TableProperties.OBJECT_STORE_ENABLED_DEFAULT + assert TableProperties.WRITE_OBJECT_STORE_PARTITIONED_PATHS_DEFAULT tbl = _create_table( session_catalog=session_catalog, identifier=f"default.arrow_table_v{format_version}_with_null_partitioned_on_col_{part_col}", - properties={"format-version": str(format_version), TableProperties.OBJECT_STORE_ENABLED: True}, + properties={"format-version": str(format_version)}, data=[arrow_table_with_null], partition_spec=partition_spec, ) @@ -546,31 +545,27 @@ def test_summaries_with_null(spark: SparkSession, session_catalog: Catalog, arro "total-data-files": "6", "total-records": "6", } - assert "removed-files-size" in summaries[5] - assert "total-files-size" in summaries[5] assert summaries[5] == { - "removed-files-size": summaries[5]["removed-files-size"], + "removed-files-size": "15774", "changed-partition-count": "2", "total-equality-deletes": "0", "deleted-data-files": "4", "total-position-deletes": "0", "total-delete-files": "0", "deleted-records": "4", - "total-files-size": summaries[5]["total-files-size"], + "total-files-size": "8684", "total-data-files": "2", "total-records": "2", } - assert "added-files-size" in summaries[6] - assert "total-files-size" in summaries[6] assert summaries[6] == { "changed-partition-count": "2", "added-data-files": "2", "total-equality-deletes": "0", "added-records": "2", "total-position-deletes": "0", - "added-files-size": summaries[6]["added-files-size"], + "added-files-size": "7887", "total-delete-files": "0", - "total-files-size": summaries[6]["total-files-size"], + "total-files-size": "16571", "total-data-files": "4", "total-records": "4", } @@ -711,10 +706,8 @@ def test_dynamic_partition_overwrite_evolve_partition(spark: SparkSession, sessi ) identifier = f"default.partitioned_{format_version}_test_dynamic_partition_overwrite_evolve_partition" - try: + with pytest.raises(NoSuchTableError): session_catalog.drop_table(identifier) - except NoSuchTableError: - pass tbl = session_catalog.create_table( identifier=identifier, diff --git a/tests/integration/test_writes/test_writes.py b/tests/integration/test_writes/test_writes.py index 38aea1e255..1fe29c684c 100644 --- a/tests/integration/test_writes/test_writes.py +++ b/tests/integration/test_writes/test_writes.py @@ -19,16 +19,12 @@ import os import random import time -import uuid from datetime import date, datetime, timedelta -from decimal import Decimal from pathlib import Path from typing import Any, Dict from urllib.parse import urlparse -import fastavro import pandas as pd -import pandas.testing import pyarrow as pa import pyarrow.compute as pc import pyarrow.parquet as pq @@ -41,26 +37,23 @@ from pyiceberg.catalog import Catalog, load_catalog from pyiceberg.catalog.hive import HiveCatalog +from pyiceberg.catalog.rest import RestCatalog from pyiceberg.catalog.sql import SqlCatalog -from pyiceberg.exceptions import CommitFailedException, NoSuchTableError +from pyiceberg.exceptions import 
NoSuchTableError from pyiceberg.expressions import And, EqualTo, GreaterThanOrEqual, In, LessThan, Not from pyiceberg.io.pyarrow import _dataframe_to_data_files from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema from pyiceberg.table import TableProperties -from pyiceberg.table.refs import MAIN_BRANCH from pyiceberg.table.sorting import SortDirection, SortField, SortOrder -from pyiceberg.transforms import DayTransform, HourTransform, IdentityTransform, Transform +from pyiceberg.transforms import DayTransform, HourTransform, IdentityTransform from pyiceberg.types import ( DateType, - DecimalType, DoubleType, IntegerType, - ListType, LongType, NestedField, StringType, - UUIDType, ) from utils import _create_table @@ -255,7 +248,7 @@ def test_summaries(spark: SparkSession, session_catalog: Catalog, arrow_table_wi "total-records": "0", } - # Append + # Overwrite assert summaries[3] == { "added-data-files": "1", "added-files-size": str(file_size), @@ -269,104 +262,6 @@ def test_summaries(spark: SparkSession, session_catalog: Catalog, arrow_table_wi } -@pytest.mark.integration -def test_summaries_partial_overwrite(spark: SparkSession, session_catalog: Catalog) -> None: - identifier = "default.test_summaries_partial_overwrite" - TEST_DATA = { - "id": [1, 2, 3, 1, 1], - "name": ["AB", "CD", "EF", "CD", "EF"], - } - pa_schema = pa.schema( - [ - pa.field("id", pa.int32()), - pa.field("name", pa.string()), - ] - ) - arrow_table = pa.Table.from_pydict(TEST_DATA, schema=pa_schema) - tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, schema=pa_schema) - with tbl.update_spec() as txn: - txn.add_identity("id") - tbl.append(arrow_table) - - assert len(tbl.inspect.data_files()) == 3 - - tbl.delete(delete_filter="id == 1 and name = 'AB'") # partial overwrite data from 1 data file - - rows = spark.sql( - f""" - SELECT operation, summary - FROM {identifier}.snapshots - ORDER BY committed_at ASC - """ - ).collect() - - operations = [row.operation for row in rows] - assert operations == ["append", "overwrite"] - - summaries = [row.summary for row in rows] - - file_size = int(summaries[0]["added-files-size"]) - assert file_size > 0 - - # APPEND - assert "added-files-size" in summaries[0] - assert "total-files-size" in summaries[0] - assert summaries[0] == { - "added-data-files": "3", - "added-files-size": summaries[0]["added-files-size"], - "added-records": "5", - "changed-partition-count": "3", - "total-data-files": "3", - "total-delete-files": "0", - "total-equality-deletes": "0", - "total-files-size": summaries[0]["total-files-size"], - "total-position-deletes": "0", - "total-records": "5", - } - # Java produces: - # { - # "added-data-files": "1", - # "added-files-size": "707", - # "added-records": "2", - # "app-id": "local-1743678304626", - # "changed-partition-count": "1", - # "deleted-data-files": "1", - # "deleted-records": "3", - # "engine-name": "spark", - # "engine-version": "3.5.5", - # "iceberg-version": "Apache Iceberg 1.8.1 (commit 9ce0fcf0af7becf25ad9fc996c3bad2afdcfd33d)", - # "removed-files-size": "693", - # "spark.app.id": "local-1743678304626", - # "total-data-files": "3", - # "total-delete-files": "0", - # "total-equality-deletes": "0", - # "total-files-size": "1993", - # "total-position-deletes": "0", - # "total-records": "4" - # } - files = tbl.inspect.data_files() - assert len(files) == 3 - assert "added-files-size" in summaries[1] - assert "removed-files-size" in summaries[1] - assert "total-files-size" in summaries[1] 
- assert summaries[1] == { - "added-data-files": "1", - "added-files-size": summaries[1]["added-files-size"], - "added-records": "2", - "changed-partition-count": "1", - "deleted-data-files": "1", - "deleted-records": "3", - "removed-files-size": summaries[1]["removed-files-size"], - "total-data-files": "3", - "total-delete-files": "0", - "total-equality-deletes": "0", - "total-files-size": summaries[1]["total-files-size"], - "total-position-deletes": "0", - "total-records": "4", - } - assert len(tbl.scan().to_pandas()) == 4 - - @pytest.mark.integration def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: identifier = "default.arrow_data_files" @@ -506,14 +401,7 @@ def test_python_writes_dictionary_encoded_column_with_spark_reads( tbl.append(arrow_table) spark_df = spark.sql(f"SELECT * FROM {identifier}").toPandas() pyiceberg_df = tbl.scan().to_pandas() - - # We're just interested in the content, PyIceberg actually makes a nice Categorical out of it: - # E AssertionError: Attributes of DataFrame.iloc[:, 1] (column name="name") are different - # E - # E Attribute "dtype" are different - # E [left]: object - # E [right]: CategoricalDtype(categories=['AB', 'CD', 'EF'], ordered=False, categories_dtype=object) - pandas.testing.assert_frame_equal(spark_df, pyiceberg_df, check_dtype=False, check_categorical=False) + assert spark_df.equals(pyiceberg_df) @pytest.mark.integration @@ -534,7 +422,7 @@ def test_python_writes_with_small_and_large_types_spark_reads( } pa_schema = pa.schema( [ - pa.field("foo", pa.string()), + pa.field("foo", pa.large_string()), pa.field("id", pa.int32()), pa.field("name", pa.string()), pa.field( @@ -544,7 +432,7 @@ def test_python_writes_with_small_and_large_types_spark_reads( pa.field("street", pa.string()), pa.field("city", pa.string()), pa.field("zip", pa.int32()), - pa.field("bar", pa.string()), + pa.field("bar", pa.large_string()), ] ), ), @@ -560,17 +448,17 @@ def test_python_writes_with_small_and_large_types_spark_reads( arrow_table_on_read = tbl.scan().to_arrow() assert arrow_table_on_read.schema == pa.schema( [ - pa.field("foo", pa.string()), + pa.field("foo", pa.large_string()), pa.field("id", pa.int32()), - pa.field("name", pa.string()), + pa.field("name", pa.large_string()), pa.field( "address", pa.struct( [ - pa.field("street", pa.string()), - pa.field("city", pa.string()), + pa.field("street", pa.large_string()), + pa.field("city", pa.large_string()), pa.field("zip", pa.int32()), - pa.field("bar", pa.string()), + pa.field("bar", pa.large_string()), ] ), ), @@ -892,6 +780,11 @@ def test_write_and_evolve(session_catalog: Catalog, format_version: int) -> None @pytest.mark.parametrize("format_version", [1, 2]) @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) def test_create_table_transaction(catalog: Catalog, format_version: int) -> None: + if format_version == 1 and isinstance(catalog, RestCatalog): + pytest.skip( + "There is a bug in the REST catalog image (https://github.com/apache/iceberg/issues/8756) that prevents create and commit a staged version 1 table" + ) + identifier = f"default.arrow_create_table_transaction_{catalog.name}_{format_version}" try: @@ -944,6 +837,11 @@ def test_create_table_transaction(catalog: Catalog, format_version: int) -> None @pytest.mark.parametrize("format_version", [1, 2]) @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) def 
test_create_table_with_non_default_values(catalog: Catalog, table_schema_with_all_types: Schema, format_version: int) -> None: + if format_version == 1 and isinstance(catalog, RestCatalog): + pytest.skip( + "There is a bug in the REST catalog image (https://github.com/apache/iceberg/issues/8756) that prevents create and commit a staged version 1 table" + ) + identifier = f"default.arrow_create_table_transaction_with_non_default_values_{catalog.name}_{format_version}" identifier_ref = f"default.arrow_create_table_transaction_with_non_default_values_ref_{catalog.name}_{format_version}" @@ -1155,30 +1053,6 @@ def test_hive_catalog_storage_descriptor( assert spark.sql("SELECT * FROM hive.default.test_storage_descriptor").count() == 3 -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2]) -def test_hive_catalog_storage_descriptor_has_changed( - session_catalog_hive: HiveCatalog, - pa_schema: pa.Schema, - arrow_table_with_null: pa.Table, - spark: SparkSession, - format_version: int, -) -> None: - tbl = _create_table( - session_catalog_hive, "default.test_storage_descriptor", {"format-version": format_version}, [arrow_table_with_null] - ) - - with tbl.transaction() as tx: - with tx.update_schema() as schema: - schema.update_column("string_long", doc="this is string_long") - schema.update_column("binary", doc="this is binary") - - with session_catalog_hive._client as open_client: - hive_table = session_catalog_hive._get_hive_table(open_client, "default", "test_storage_descriptor") - assert "this is string_long" in str(hive_table.sd) - assert "this is binary" in str(hive_table.sd) - - @pytest.mark.integration @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")]) def test_sanitize_character_partitioned(catalog: Catalog) -> None: @@ -1201,137 +1075,6 @@ def test_sanitize_character_partitioned(catalog: Catalog) -> None: assert len(tbl.scan().to_arrow()) == 22 -@pytest.mark.integration -@pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog")]) -def test_sanitize_character_partitioned_avro_bug(catalog: Catalog) -> None: - table_name = "default.test_table_partitioned_sanitized_character_avro" - try: - catalog.drop_table(table_name) - except NoSuchTableError: - pass - - schema = Schema( - NestedField(id=1, name="😎", field_type=StringType(), required=False), - ) - - partition_spec = PartitionSpec( - PartitionField( - source_id=1, - field_id=1001, - transform=IdentityTransform(), - name="😎", - ) - ) - - tbl = _create_table( - session_catalog=catalog, - identifier=table_name, - schema=schema, - partition_spec=partition_spec, - data=[ - pa.Table.from_arrays( - [pa.array([str(i) for i in range(22)])], schema=pa.schema([pa.field("😎", pa.string(), nullable=False)]) - ) - ], - ) - - assert len(tbl.scan().to_arrow()) == 22 - - # verify that we can read the table with DuckDB - import duckdb - - location = tbl.metadata_location - duckdb.sql("INSTALL iceberg; LOAD iceberg;") - # Configure S3 settings for DuckDB to match the catalog configuration - duckdb.sql("SET s3_endpoint='localhost:9000';") - duckdb.sql("SET s3_access_key_id='admin';") - duckdb.sql("SET s3_secret_access_key='password';") - duckdb.sql("SET s3_use_ssl=false;") - duckdb.sql("SET s3_url_style='path';") - result = duckdb.sql(f"SELECT * FROM iceberg_scan('{location}')").fetchall() - assert len(result) == 22 - - -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2]) -def 
test_cross_platform_special_character_compatibility( - spark: SparkSession, session_catalog: Catalog, format_version: int -) -> None: - """Test cross-platform compatibility with special characters in column names.""" - identifier = "default.test_cross_platform_special_characters" - - # Test various special characters that need sanitization - special_characters = [ - "😎", # emoji - Java produces _xD83D_xDE0E, Python produces _x1F60E - "a.b", # dot - both should produce a_x2Eb - "a#b", # hash - both should produce a_x23b - "9x", # starts with digit - both should produce _9x - "x_", # valid - should remain unchanged - "letter/abc", # slash - both should produce letter_x2Fabc - ] - - for i, special_char in enumerate(special_characters): - table_name = f"{identifier}_{format_version}_{i}" - pyiceberg_table_name = f"{identifier}_pyiceberg_{format_version}_{i}" - - try: - session_catalog.drop_table(table_name) - except Exception: - pass - try: - session_catalog.drop_table(pyiceberg_table_name) - except Exception: - pass - - try: - # Test 1: Spark writes, PyIceberg reads - spark_df = spark.createDataFrame([("test_value",)], [special_char]) - spark_df.writeTo(table_name).using("iceberg").createOrReplace() - - # Read with PyIceberg table scan - tbl = session_catalog.load_table(table_name) - pyiceberg_df = tbl.scan().to_pandas() - assert len(pyiceberg_df) == 1 - assert special_char in pyiceberg_df.columns - assert pyiceberg_df.iloc[0][special_char] == "test_value" - - # Test 2: PyIceberg writes, Spark reads - from pyiceberg.schema import Schema - from pyiceberg.types import NestedField, StringType - - schema = Schema(NestedField(field_id=1, name=special_char, field_type=StringType(), required=True)) - - tbl_pyiceberg = session_catalog.create_table( - identifier=pyiceberg_table_name, schema=schema, properties={"format-version": str(format_version)} - ) - - import pyarrow as pa - - # Create PyArrow schema with required field to match Iceberg schema - pa_schema = pa.schema([pa.field(special_char, pa.string(), nullable=False)]) - data = pa.Table.from_pydict({special_char: ["pyiceberg_value"]}, schema=pa_schema) - tbl_pyiceberg.append(data) - - # Read with Spark - spark_df_read = spark.table(pyiceberg_table_name) - spark_result = spark_df_read.collect() - - # Verify data integrity - assert len(spark_result) == 1 - assert special_char in spark_df_read.columns - assert spark_result[0][special_char] == "pyiceberg_value" - - finally: - try: - session_catalog.drop_table(table_name) - except Exception: - pass - try: - session_catalog.drop_table(pyiceberg_table_name) - except Exception: - pass - - @pytest.mark.integration @pytest.mark.parametrize("format_version", [1, 2]) def test_table_write_subset_of_schema(session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int) -> None: @@ -1421,10 +1164,10 @@ def test_table_write_schema_with_valid_upcast( pa.schema( ( pa.field("long", pa.int64(), nullable=True), - pa.field("list", pa.list_(pa.int64()), nullable=False), - pa.field("map", pa.map_(pa.string(), pa.int64()), nullable=False), + pa.field("list", pa.large_list(pa.int64()), nullable=False), + pa.field("map", pa.map_(pa.large_string(), pa.int64()), nullable=False), pa.field("double", pa.float64(), nullable=True), # can support upcasting float to double - pa.field("uuid", pa.uuid(), nullable=True), + pa.field("uuid", pa.binary(length=16), nullable=True), # can UUID is read as fixed length binary of length 16 ) ) ) @@ -1671,7 +1414,7 @@ def 
test_rest_catalog_with_empty_catalog_name_append_data(session_catalog: Catal @pytest.mark.integration def test_table_v1_with_null_nested_namespace(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - identifier = "default.table_v1_with_null_nested_namespace" + identifier = "default.lower.table_v1_with_null_nested_namespace" tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [arrow_table_with_null]) assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" @@ -1896,368 +1639,3 @@ def test_abort_table_transaction_on_exception( # Validate the transaction is aborted and no partial update is applied assert len(tbl.scan().to_pandas()) == table_size # type: ignore - - -@pytest.mark.integration -def test_write_optional_list(session_catalog: Catalog) -> None: - identifier = "default.test_write_optional_list" - schema = Schema( - NestedField(field_id=1, name="name", field_type=StringType(), required=False), - NestedField( - field_id=3, - name="my_list", - field_type=ListType(element_id=45, element=StringType(), element_required=False), - required=False, - ), - ) - session_catalog.create_table_if_not_exists(identifier, schema) - - df_1 = pa.Table.from_pylist( - [ - {"name": "one", "my_list": ["test"]}, - {"name": "another", "my_list": ["test"]}, - ] - ) - session_catalog.load_table(identifier).append(df_1) - - assert len(session_catalog.load_table(identifier).scan().to_arrow()) == 2 - - df_2 = pa.Table.from_pylist( - [ - {"name": "one"}, - {"name": "another"}, - ] - ) - session_catalog.load_table(identifier).append(df_2) - - assert len(session_catalog.load_table(identifier).scan().to_arrow()) == 4 - - -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2]) -def test_double_commit_transaction( - spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int -) -> None: - identifier = "default.arrow_data_files" - tbl = _create_table(session_catalog, identifier, {"format-version": format_version}, []) - - assert len(tbl.metadata.metadata_log) == 0 - - with tbl.transaction() as tx: - tx.append(arrow_table_with_null) - tx.commit_transaction() - - assert len(tbl.metadata.metadata_log) == 1 - - -@pytest.mark.integration -@pytest.mark.parametrize("format_version", [1, 2]) -def test_evolve_and_write( - spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int -) -> None: - identifier = "default.test_evolve_and_write" - tbl = _create_table(session_catalog, identifier, properties={"format-version": format_version}, schema=Schema()) - other_table = session_catalog.load_table(identifier) - - numbers = pa.array([1, 2, 3, 4], type=pa.int32()) - - with tbl.update_schema() as upd: - # This is not known by other_table - upd.add_column("id", IntegerType()) - - with other_table.transaction() as tx: - # Refreshes the underlying metadata, and the schema - other_table.refresh() - tx.append( - pa.Table.from_arrays( - [ - numbers, - ], - schema=pa.schema( - [ - pa.field("id", pa.int32(), nullable=True), - ] - ), - ) - ) - - assert session_catalog.load_table(identifier).scan().to_arrow().column(0).combine_chunks() == numbers - - -@pytest.mark.integration -def test_read_write_decimals(session_catalog: Catalog) -> None: - """Roundtrip decimal types to make sure that we correctly write them as ints""" - identifier = "default.test_read_write_decimals" - - arrow_table = pa.Table.from_pydict( - { - "decimal8": pa.array([Decimal("123.45"), Decimal("678.91")], pa.decimal128(8, 
2)), - "decimal16": pa.array([Decimal("12345679.123456"), Decimal("67891234.678912")], pa.decimal128(16, 6)), - "decimal19": pa.array([Decimal("1234567890123.123456"), Decimal("9876543210703.654321")], pa.decimal128(19, 6)), - }, - ) - - tbl = _create_table( - session_catalog, - identifier, - properties={"format-version": 2}, - schema=Schema( - NestedField(1, "decimal8", DecimalType(8, 2)), - NestedField(2, "decimal16", DecimalType(16, 6)), - NestedField(3, "decimal19", DecimalType(19, 6)), - ), - ) - - tbl.append(arrow_table) - - assert tbl.scan().to_arrow() == arrow_table - - -@pytest.mark.integration -@pytest.mark.parametrize( - "transform", - [ - IdentityTransform(), - # Bucket is disabled because of an issue in Iceberg Java: - # https://github.com/apache/iceberg/pull/13324 - # BucketTransform(32) - ], -) -def test_uuid_partitioning(session_catalog: Catalog, spark: SparkSession, transform: Transform) -> None: # type: ignore - identifier = f"default.test_uuid_partitioning_{str(transform).replace('[32]', '')}" - - schema = Schema(NestedField(field_id=1, name="uuid", field_type=UUIDType(), required=True)) - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - - partition_spec = PartitionSpec(PartitionField(source_id=1, field_id=1000, transform=transform, name="uuid_identity")) - - import pyarrow as pa - - arr_table = pa.Table.from_pydict( - { - "uuid": [ - uuid.UUID("00000000-0000-0000-0000-000000000000").bytes, - uuid.UUID("11111111-1111-1111-1111-111111111111").bytes, - ], - }, - schema=pa.schema( - [ - # Uuid not yet supported, so we have to stick with `binary(16)` - # https://github.com/apache/arrow/issues/46468 - pa.field("uuid", pa.binary(16), nullable=False), - ] - ), - ) - - tbl = session_catalog.create_table( - identifier=identifier, - schema=schema, - partition_spec=partition_spec, - ) - - tbl.append(arr_table) - - lhs = [r[0] for r in spark.table(identifier).collect()] - rhs = [str(u.as_py()) for u in tbl.scan().to_arrow()["uuid"].combine_chunks()] - assert lhs == rhs - - -@pytest.mark.integration -def test_avro_compression_codecs(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - identifier = "default.test_avro_compression_codecs" - tbl = _create_table(session_catalog, identifier, schema=arrow_table_with_null.schema, data=[arrow_table_with_null]) - - current_snapshot = tbl.current_snapshot() - assert current_snapshot is not None - - with tbl.io.new_input(current_snapshot.manifest_list).open() as f: - reader = fastavro.reader(f) - assert reader.codec == "deflate" - - with tbl.transaction() as tx: - tx.set_properties(**{TableProperties.WRITE_AVRO_COMPRESSION: "null"}) # type: ignore - - tbl.append(arrow_table_with_null) - - current_snapshot = tbl.current_snapshot() - assert current_snapshot is not None - - with tbl.io.new_input(current_snapshot.manifest_list).open() as f: - reader = fastavro.reader(f) - assert reader.codec == "null" - - -@pytest.mark.integration -def test_append_to_non_existing_branch(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - identifier = "default.test_non_existing_branch" - tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, []) - with pytest.raises( - CommitFailedException, match=f"Table has no snapshots and can only be written to the {MAIN_BRANCH} BRANCH." 
- ): - tbl.append(arrow_table_with_null, branch="non_existing_branch") - - -@pytest.mark.integration -def test_append_to_existing_branch(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - identifier = "default.test_existing_branch_append" - branch = "existing_branch" - tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_with_null]) - - assert tbl.metadata.current_snapshot_id is not None - - tbl.manage_snapshots().create_branch(snapshot_id=tbl.metadata.current_snapshot_id, branch_name=branch).commit() - tbl.append(arrow_table_with_null, branch=branch) - - assert len(tbl.scan().use_ref(branch).to_arrow()) == 6 - assert len(tbl.scan().to_arrow()) == 3 - branch_snapshot = tbl.metadata.snapshot_by_name(branch) - assert branch_snapshot is not None - main_snapshot = tbl.metadata.snapshot_by_name("main") - assert main_snapshot is not None - assert branch_snapshot.parent_snapshot_id == main_snapshot.snapshot_id - - -@pytest.mark.integration -def test_delete_to_existing_branch(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - identifier = "default.test_existing_branch_delete" - branch = "existing_branch" - tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_with_null]) - - assert tbl.metadata.current_snapshot_id is not None - - tbl.manage_snapshots().create_branch(snapshot_id=tbl.metadata.current_snapshot_id, branch_name=branch).commit() - tbl.delete(delete_filter="int = 9", branch=branch) - - assert len(tbl.scan().use_ref(branch).to_arrow()) == 2 - assert len(tbl.scan().to_arrow()) == 3 - branch_snapshot = tbl.metadata.snapshot_by_name(branch) - assert branch_snapshot is not None - main_snapshot = tbl.metadata.snapshot_by_name("main") - assert main_snapshot is not None - assert branch_snapshot.parent_snapshot_id == main_snapshot.snapshot_id - - -@pytest.mark.integration -def test_overwrite_to_existing_branch(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - identifier = "default.test_existing_branch_overwrite" - branch = "existing_branch" - tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_with_null]) - - assert tbl.metadata.current_snapshot_id is not None - - tbl.manage_snapshots().create_branch(snapshot_id=tbl.metadata.current_snapshot_id, branch_name=branch).commit() - tbl.overwrite(arrow_table_with_null, branch=branch) - - assert len(tbl.scan().use_ref(branch).to_arrow()) == 3 - assert len(tbl.scan().to_arrow()) == 3 - branch_snapshot = tbl.metadata.snapshot_by_name(branch) - assert branch_snapshot is not None and branch_snapshot.parent_snapshot_id is not None - delete_snapshot = tbl.metadata.snapshot_by_id(branch_snapshot.parent_snapshot_id) - assert delete_snapshot is not None - main_snapshot = tbl.metadata.snapshot_by_name("main") - assert main_snapshot is not None - assert ( - delete_snapshot.parent_snapshot_id == main_snapshot.snapshot_id - ) # Currently overwrite is a delete followed by an append operation - - -@pytest.mark.integration -def test_intertwined_branch_writes(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - identifier = "default.test_intertwined_branch_operations" - branch1 = "existing_branch_1" - branch2 = "existing_branch_2" - - tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_with_null]) - - assert tbl.metadata.current_snapshot_id is not None - - tbl.manage_snapshots().create_branch(snapshot_id=tbl.metadata.current_snapshot_id, 
branch_name=branch1).commit() - - tbl.delete("int = 9", branch=branch1) - - tbl.append(arrow_table_with_null) - - tbl.manage_snapshots().create_branch(snapshot_id=tbl.metadata.current_snapshot_id, branch_name=branch2).commit() - - tbl.overwrite(arrow_table_with_null, branch=branch2) - - assert len(tbl.scan().use_ref(branch1).to_arrow()) == 2 - assert len(tbl.scan().use_ref(branch2).to_arrow()) == 3 - assert len(tbl.scan().to_arrow()) == 6 - - -@pytest.mark.integration -def test_branch_spark_write_py_read(session_catalog: Catalog, spark: SparkSession, arrow_table_with_null: pa.Table) -> None: - # Initialize table with branch - identifier = "default.test_branch_spark_write_py_read" - tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_with_null]) - branch = "existing_spark_branch" - - # Create branch in Spark - spark.sql(f"ALTER TABLE {identifier} CREATE BRANCH {branch}") - - # Spark Write - spark.sql( - f""" - DELETE FROM {identifier}.branch_{branch} - WHERE int = 9 - """ - ) - - # Refresh table to get new refs - tbl.refresh() - - # Python Read - assert len(tbl.scan().to_arrow()) == 3 - assert len(tbl.scan().use_ref(branch).to_arrow()) == 2 - - -@pytest.mark.integration -def test_branch_py_write_spark_read(session_catalog: Catalog, spark: SparkSession, arrow_table_with_null: pa.Table) -> None: - # Initialize table with branch - identifier = "default.test_branch_py_write_spark_read" - tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_with_null]) - branch = "existing_py_branch" - - assert tbl.metadata.current_snapshot_id is not None - - # Create branch - tbl.manage_snapshots().create_branch(snapshot_id=tbl.metadata.current_snapshot_id, branch_name=branch).commit() - - # Python Write - tbl.delete("int = 9", branch=branch) - - # Spark Read - main_df = spark.sql( - f""" - SELECT * - FROM {identifier} - """ - ) - branch_df = spark.sql( - f""" - SELECT * - FROM {identifier}.branch_{branch} - """ - ) - assert main_df.count() == 3 - assert branch_df.count() == 2 - - -@pytest.mark.integration -def test_nanosecond_support_on_catalog(session_catalog: Catalog) -> None: - identifier = "default.test_nanosecond_support_on_catalog" - # Create a pyarrow table with a nanosecond timestamp column - table = pa.Table.from_arrays( - [ - pa.array([datetime.now()], type=pa.timestamp("ns")), - pa.array([datetime.now()], type=pa.timestamp("ns", tz="America/New_York")), - ], - names=["timestamp_ns", "timestamptz_ns"], - ) - - _create_table(session_catalog, identifier, {"format-version": "3"}, schema=table.schema) diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py index f5c3082edc..e2be7872a9 100644 --- a/tests/io/test_pyarrow.py +++ b/tests/io/test_pyarrow.py @@ -19,18 +19,15 @@ import os import tempfile import uuid -import warnings from datetime import date from typing import Any, List, Optional from unittest.mock import MagicMock, patch from uuid import uuid4 -import pyarrow import pyarrow as pa import pyarrow.parquet as pq import pytest -from packaging import version -from pyarrow.fs import AwsDefaultS3RetryStrategy, FileType, LocalFileSystem, S3FileSystem +from pyarrow.fs import FileType, LocalFileSystem, S3FileSystem from pyiceberg.exceptions import ResolveError from pyiceberg.expressions import ( @@ -58,7 +55,7 @@ Or, ) from pyiceberg.expressions.literals import literal -from pyiceberg.io import S3_RETRY_STRATEGY_IMPL, InputStream, OutputStream, load_file_io +from pyiceberg.io import InputStream, OutputStream, load_file_io from 
pyiceberg.io.pyarrow import ( ICEBERG_SCHEMA, ArrowScan, @@ -84,7 +81,7 @@ from pyiceberg.table import FileScanTask, TableProperties from pyiceberg.table.metadata import TableMetadataV2 from pyiceberg.table.name_mapping import create_mapping_from_schema -from pyiceberg.transforms import HourTransform, IdentityTransform +from pyiceberg.transforms import IdentityTransform from pyiceberg.typedef import UTF8, Properties, Record from pyiceberg.types import ( BinaryType, @@ -109,11 +106,6 @@ from tests.catalog.test_base import InMemoryCatalog from tests.conftest import UNIFIED_AWS_SESSION_PROPERTIES -skip_if_pyarrow_too_old = pytest.mark.skipif( - version.parse(pyarrow.__version__) < version.parse("20.0.0"), - reason="Requires pyarrow version >= 20.0.0", -) - def test_pyarrow_infer_local_fs_from_path() -> None: """Test path with `file` scheme and no scheme both use LocalFileSystem""" @@ -970,10 +962,6 @@ def file_map(schema_map: Schema, tmpdir: str) -> str: def project( schema: Schema, files: List[str], expr: Optional[BooleanExpression] = None, table_schema: Optional[Schema] = None ) -> pa.Table: - def _set_spec_id(datafile: DataFile) -> DataFile: - datafile.spec_id = 0 - return datafile - return ArrowScan( table_metadata=TableMetadataV2( location="file://a/b/", @@ -989,15 +977,13 @@ def _set_spec_id(datafile: DataFile) -> DataFile: ).to_table( tasks=[ FileScanTask( - _set_spec_id( - DataFile.from_args( - content=DataFileContent.DATA, - file_path=file, - file_format=FileFormat.PARQUET, - partition={}, - record_count=3, - file_size_in_bytes=3, - ) + DataFile( + content=DataFileContent.DATA, + file_path=file, + file_format=FileFormat.PARQUET, + partition={}, + record_count=3, + file_size_in_bytes=3, ) ) for file in files @@ -1079,10 +1065,10 @@ def test_read_map(schema_map: Schema, file_map: str) -> None: assert ( repr(result_table.schema) - == """properties: map - child 0, entries: struct not null - child 0, key: string not null - child 1, value: string not null""" + == """properties: map + child 0, entries: struct not null + child 0, key: large_string not null + child 1, value: large_string not null""" ) @@ -1167,7 +1153,7 @@ def test_identity_transform_column_projection(tmp_path: str, catalog: InMemoryCa properties={TableProperties.DEFAULT_NAME_MAPPING: create_mapping_from_schema(schema).model_dump_json()}, ) - file_data = pa.array(["foo", "bar", "baz"], type=pa.string()) + file_data = pa.array(["foo"], type=pa.string()) file_loc = f"{tmp_path}/test.parquet" pq.write_table(pa.table([file_data], names=["other_field"]), file_loc) @@ -1177,12 +1163,12 @@ def test_identity_transform_column_projection(tmp_path: str, catalog: InMemoryCa parquet_column_mapping=parquet_path_to_id_mapping(table.schema()), ) - unpartitioned_file = DataFile.from_args( + unpartitioned_file = DataFile( content=DataFileContent.DATA, file_path=file_loc, file_format=FileFormat.PARQUET, # projected value - partition=Record(1), + partition=Record(partition_id=1), file_size_in_bytes=os.path.getsize(file_loc), sort_order_id=None, spec_id=table.metadata.default_spec_id, @@ -1195,24 +1181,15 @@ def test_identity_transform_column_projection(tmp_path: str, catalog: InMemoryCa with transaction.update_snapshot().overwrite() as update: update.append_data_file(unpartitioned_file) - schema = pa.schema([("other_field", pa.string()), ("partition_id", pa.int32())]) - assert table.scan().to_arrow() == pa.table( - { - "other_field": ["foo", "bar", "baz"], - "partition_id": [1, 1, 1], - }, - schema=schema, - ) - # Test that row filter works with 
partition value projection - assert table.scan(row_filter="partition_id = 1").to_arrow() == pa.table( - { - "other_field": ["foo", "bar", "baz"], - "partition_id": [1, 1, 1], - }, - schema=schema, + assert ( + str(table.scan().to_arrow()) + == """pyarrow.Table +other_field: large_string +partition_id: int64 +---- +other_field: [["foo"]] +partition_id: [[1]]""" ) - # Test that row filter does not return any rows for a non-existing partition value - assert len(table.scan(row_filter="partition_id = -1").to_arrow()) == 0 def test_identity_transform_columns_projection(tmp_path: str, catalog: InMemoryCatalog) -> None: @@ -1248,12 +1225,12 @@ def test_identity_transform_columns_projection(tmp_path: str, catalog: InMemoryC parquet_column_mapping=parquet_path_to_id_mapping(table.schema()), ) - unpartitioned_file = DataFile.from_args( + unpartitioned_file = DataFile( content=DataFileContent.DATA, file_path=file_loc, file_format=FileFormat.PARQUET, # projected value - partition=Record(2, 3), + partition=Record(field_2=2, field_3=3), file_size_in_bytes=os.path.getsize(file_loc), sort_order_id=None, spec_id=table.metadata.default_spec_id, @@ -1269,9 +1246,9 @@ def test_identity_transform_columns_projection(tmp_path: str, catalog: InMemoryC assert ( str(table.scan().to_arrow()) == """pyarrow.Table -field_1: string -field_2: int32 -field_3: int32 +field_1: large_string +field_2: int64 +field_3: int64 ---- field_1: [["foo"]] field_2: [[2]] @@ -1494,9 +1471,9 @@ def test_projection_maps_of_structs(schema_map_of_structs: Schema, file_map_of_s assert actual.as_py() == expected assert ( repr(result_table.schema) - == """locations: map> - child 0, entries: struct not null> not null - child 0, key: string not null + == """locations: map> + child 0, entries: struct not null> not null + child 0, key: large_string not null child 1, value: struct not null child 0, latitude: double not null child 1, longitude: double not null @@ -1563,7 +1540,7 @@ def deletes_file(tmp_path: str, example_task: FileScanTask) -> str: def test_read_deletes(deletes_file: str, example_task: FileScanTask) -> None: - deletes = _read_deletes(PyArrowFileIO(), DataFile.from_args(file_path=deletes_file, file_format=FileFormat.PARQUET)) + deletes = _read_deletes(LocalFileSystem(), DataFile(file_path=deletes_file, file_format=FileFormat.PARQUET)) assert set(deletes.keys()) == {example_task.file.file_path} assert list(deletes.values())[0] == pa.chunked_array([[1, 3, 5]]) @@ -1572,9 +1549,7 @@ def test_delete(deletes_file: str, example_task: FileScanTask, table_schema_simp metadata_location = "file://a/b/c.json" example_task_with_delete = FileScanTask( data_file=example_task.file, - delete_files={ - DataFile.from_args(content=DataFileContent.POSITION_DELETES, file_path=deletes_file, file_format=FileFormat.PARQUET) - }, + delete_files={DataFile(content=DataFileContent.POSITION_DELETES, file_path=deletes_file, file_format=FileFormat.PARQUET)}, ) with_deletes = ArrowScan( table_metadata=TableMetadataV2( @@ -1608,8 +1583,8 @@ def test_delete_duplicates(deletes_file: str, example_task: FileScanTask, table_ example_task_with_delete = FileScanTask( data_file=example_task.file, delete_files={ - DataFile.from_args(content=DataFileContent.POSITION_DELETES, file_path=deletes_file, file_format=FileFormat.PARQUET), - DataFile.from_args(content=DataFileContent.POSITION_DELETES, file_path=deletes_file, file_format=FileFormat.PARQUET), + DataFile(content=DataFileContent.POSITION_DELETES, file_path=deletes_file, file_format=FileFormat.PARQUET), + 
DataFile(content=DataFileContent.POSITION_DELETES, file_path=deletes_file, file_format=FileFormat.PARQUET), }, ) @@ -1696,7 +1671,7 @@ def test_new_output_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None: @pytest.mark.gcs @pytest.mark.skip(reason="Open issue on Arrow: https://github.com/apache/arrow/issues/36993") def test_write_and_read_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None: - """Test writing and reading a file using PyArrowFile""" + """Test writing and reading a file using FsspecInputFile and FsspecOutputFile""" location = f"gs://warehouse/{uuid4()}.txt" output_file = pyarrow_fileio_gcs.new_output(location=location) with output_file.create() as f: @@ -1713,7 +1688,7 @@ def test_write_and_read_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None: @pytest.mark.gcs def test_getting_length_of_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None: - """Test getting the length of PyArrowFile""" + """Test getting the length of an FsspecInputFile and FsspecOutputFile""" filename = str(uuid4()) output_file = pyarrow_fileio_gcs.new_output(location=f"gs://warehouse/{filename}") @@ -1777,7 +1752,7 @@ def test_read_specified_bytes_for_file_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> @pytest.mark.gcs @pytest.mark.skip(reason="Open issue on Arrow: https://github.com/apache/arrow/issues/36993") def test_raise_on_opening_file_not_found_gcs(pyarrow_fileio_gcs: PyArrowFileIO) -> None: - """Test that PyArrowFile raises appropriately when the gcs file is not found""" + """Test that an fsspec input file raises appropriately when the gcs file is not found""" filename = str(uuid4()) input_file = pyarrow_fileio_gcs.new_input(location=f"gs://warehouse/{filename}") @@ -1839,7 +1814,7 @@ def test_converting_an_outputfile_to_an_inputfile_gcs(pyarrow_fileio_gcs: PyArro @pytest.mark.gcs @pytest.mark.skip(reason="Open issue on Arrow: https://github.com/apache/arrow/issues/36993") def test_writing_avro_file_gcs(generated_manifest_entry_file: str, pyarrow_fileio_gcs: PyArrowFileIO) -> None: - """Test that bytes match when reading a local avro file, writing it using pyarrow file-io, and then reading it again""" + """Test that bytes match when reading a local avro file, writing it using fsspec file-io, and then reading it again""" filename = str(uuid4()) with PyArrowFileIO().new_input(location=generated_manifest_entry_file).open() as f: b1 = f.read() @@ -1852,192 +1827,6 @@ def test_writing_avro_file_gcs(generated_manifest_entry_file: str, pyarrow_filei pyarrow_fileio_gcs.delete(f"gs://warehouse/{filename}") -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_new_input_file_adls(pyarrow_fileio_adls: PyArrowFileIO, adls_scheme: str) -> None: - """Test creating a new input file from pyarrow file-io""" - filename = str(uuid4()) - - input_file = pyarrow_fileio_adls.new_input(f"{adls_scheme}://warehouse/{filename}") - - assert isinstance(input_file, PyArrowFile) - assert input_file.location == f"{adls_scheme}://warehouse/{filename}" - - -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_new_output_file_adls(pyarrow_fileio_adls: PyArrowFileIO, adls_scheme: str) -> None: - """Test creating a new output file from pyarrow file-io""" - filename = str(uuid4()) - - output_file = pyarrow_fileio_adls.new_output(f"{adls_scheme}://warehouse/{filename}") - - assert isinstance(output_file, PyArrowFile) - assert output_file.location == f"{adls_scheme}://warehouse/{filename}" - - -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_write_and_read_file_adls(pyarrow_fileio_adls: PyArrowFileIO, adls_scheme: str) -> None: - 
"""Test writing and reading a file using PyArrowFile""" - location = f"{adls_scheme}://warehouse/{uuid4()}.txt" - output_file = pyarrow_fileio_adls.new_output(location=location) - with output_file.create() as f: - assert f.write(b"foo") == 3 - - assert output_file.exists() - - input_file = pyarrow_fileio_adls.new_input(location=location) - with input_file.open() as f: - assert f.read() == b"foo" - - pyarrow_fileio_adls.delete(input_file) - - -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_getting_length_of_file_adls(pyarrow_fileio_adls: PyArrowFileIO, adls_scheme: str) -> None: - """Test getting the length of PyArrowFile""" - filename = str(uuid4()) - - output_file = pyarrow_fileio_adls.new_output(location=f"{adls_scheme}://warehouse/{filename}") - with output_file.create() as f: - f.write(b"foobar") - - assert len(output_file) == 6 - - input_file = pyarrow_fileio_adls.new_input(location=f"{adls_scheme}://warehouse/{filename}") - assert len(input_file) == 6 - - pyarrow_fileio_adls.delete(output_file) - - -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_file_tell_adls(pyarrow_fileio_adls: PyArrowFileIO, adls_scheme: str) -> None: - location = f"{adls_scheme}://warehouse/{uuid4()}" - - output_file = pyarrow_fileio_adls.new_output(location=location) - with output_file.create() as write_file: - write_file.write(b"foobar") - - input_file = pyarrow_fileio_adls.new_input(location=location) - with input_file.open() as f: - f.seek(0) - assert f.tell() == 0 - f.seek(1) - assert f.tell() == 1 - f.seek(3) - assert f.tell() == 3 - f.seek(0) - assert f.tell() == 0 - - -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_read_specified_bytes_for_file_adls(pyarrow_fileio_adls: PyArrowFileIO) -> None: - location = f"abfss://warehouse/{uuid4()}" - - output_file = pyarrow_fileio_adls.new_output(location=location) - with output_file.create() as write_file: - write_file.write(b"foo") - - input_file = pyarrow_fileio_adls.new_input(location=location) - with input_file.open() as f: - f.seek(0) - assert b"f" == f.read(1) - f.seek(0) - assert b"fo" == f.read(2) - f.seek(1) - assert b"o" == f.read(1) - f.seek(1) - assert b"oo" == f.read(2) - f.seek(0) - assert b"foo" == f.read(999) # test reading amount larger than entire content length - - pyarrow_fileio_adls.delete(input_file) - - -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_raise_on_opening_file_not_found_adls(pyarrow_fileio_adls: PyArrowFileIO, adls_scheme: str) -> None: - """Test that PyArrowFile raises appropriately when the adls file is not found""" - - filename = str(uuid4()) - input_file = pyarrow_fileio_adls.new_input(location=f"{adls_scheme}://warehouse/{filename}") - with pytest.raises(FileNotFoundError) as exc_info: - input_file.open().read() - - assert filename in str(exc_info.value) - - -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_checking_if_a_file_exists_adls(pyarrow_fileio_adls: PyArrowFileIO, adls_scheme: str) -> None: - """Test checking if a file exists""" - non_existent_file = pyarrow_fileio_adls.new_input(location=f"{adls_scheme}://warehouse/does-not-exist.txt") - assert not non_existent_file.exists() - - location = f"{adls_scheme}://warehouse/{uuid4()}" - output_file = pyarrow_fileio_adls.new_output(location=location) - assert not output_file.exists() - with output_file.create() as f: - f.write(b"foo") - - existing_input_file = pyarrow_fileio_adls.new_input(location=location) - assert existing_input_file.exists() - - existing_output_file = pyarrow_fileio_adls.new_output(location=location) - assert 
existing_output_file.exists() - - pyarrow_fileio_adls.delete(existing_output_file) - - -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_closing_a_file_adls(pyarrow_fileio_adls: PyArrowFileIO, adls_scheme: str) -> None: - """Test closing an output file and input file""" - filename = str(uuid4()) - output_file = pyarrow_fileio_adls.new_output(location=f"{adls_scheme}://warehouse/{filename}") - with output_file.create() as write_file: - write_file.write(b"foo") - assert not write_file.closed # type: ignore - assert write_file.closed # type: ignore - - input_file = pyarrow_fileio_adls.new_input(location=f"{adls_scheme}://warehouse/{filename}") - with input_file.open() as f: - assert not f.closed # type: ignore - assert f.closed # type: ignore - - pyarrow_fileio_adls.delete(f"{adls_scheme}://warehouse/{filename}") - - -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_converting_an_outputfile_to_an_inputfile_adls(pyarrow_fileio_adls: PyArrowFileIO, adls_scheme: str) -> None: - """Test converting an output file to an input file""" - filename = str(uuid4()) - output_file = pyarrow_fileio_adls.new_output(location=f"{adls_scheme}://warehouse/{filename}") - input_file = output_file.to_input_file() - assert input_file.location == output_file.location - - -@pytest.mark.adls -@skip_if_pyarrow_too_old -def test_writing_avro_file_adls(generated_manifest_entry_file: str, pyarrow_fileio_adls: PyArrowFileIO, adls_scheme: str) -> None: - """Test that bytes match when reading a local avro file, writing it using pyarrow file-io, and then reading it again""" - filename = str(uuid4()) - with PyArrowFileIO().new_input(location=generated_manifest_entry_file).open() as f: - b1 = f.read() - with pyarrow_fileio_adls.new_output(location=f"{adls_scheme}://warehouse/{filename}").create() as out_f: - out_f.write(b1) - with pyarrow_fileio_adls.new_input(location=f"{adls_scheme}://warehouse/{filename}").open() as in_f: - b2 = in_f.read() - assert b1 == b2 # Check that bytes of read from local avro file match bytes written to s3 - - pyarrow_fileio_adls.delete(f"{adls_scheme}://warehouse/{filename}") - - def test_parse_location() -> None: def check_results(location: str, expected_schema: str, expected_netloc: str, expected_uri: str) -> None: schema, netloc, uri = PyArrowFileIO.parse_location(location) @@ -2354,114 +2143,18 @@ def test_partition_for_demo() -> None: ) result = _determine_partitions(partition_spec, test_schema, arrow_table) assert {table_partition.partition_key.partition for table_partition in result} == { - Record(2, 2020), - Record(100, 2021), - Record(4, 2021), - Record(4, 2022), - Record(2, 2022), - Record(5, 2019), + Record(n_legs_identity=2, year_identity=2020), + Record(n_legs_identity=100, year_identity=2021), + Record(n_legs_identity=4, year_identity=2021), + Record(n_legs_identity=4, year_identity=2022), + Record(n_legs_identity=2, year_identity=2022), + Record(n_legs_identity=5, year_identity=2019), } assert ( pa.concat_tables([table_partition.arrow_table_partition for table_partition in result]).num_rows == arrow_table.num_rows ) -def test_partition_for_nested_field() -> None: - schema = Schema( - NestedField(id=1, name="foo", field_type=StringType(), required=True), - NestedField( - id=2, - name="bar", - field_type=StructType( - NestedField(id=3, name="baz", field_type=TimestampType(), required=False), - NestedField(id=4, name="qux", field_type=IntegerType(), required=False), - ), - required=True, - ), - ) - - spec = PartitionSpec(PartitionField(source_id=3, field_id=1000, 
transform=HourTransform(), name="ts")) - - from datetime import datetime - - t1 = datetime(2025, 7, 11, 9, 30, 0) - t2 = datetime(2025, 7, 11, 10, 30, 0) - - test_data = [ - {"foo": "a", "bar": {"baz": t1, "qux": 1}}, - {"foo": "b", "bar": {"baz": t2, "qux": 2}}, - ] - - arrow_table = pa.Table.from_pylist(test_data, schema=schema.as_arrow()) - partitions = _determine_partitions(spec, schema, arrow_table) - partition_values = {p.partition_key.partition[0] for p in partitions} - - assert partition_values == {486729, 486730} - - -def test_partition_for_deep_nested_field() -> None: - schema = Schema( - NestedField( - id=1, - name="foo", - field_type=StructType( - NestedField( - id=2, - name="bar", - field_type=StructType(NestedField(id=3, name="baz", field_type=StringType(), required=False)), - required=True, - ) - ), - required=True, - ) - ) - - spec = PartitionSpec(PartitionField(source_id=3, field_id=1000, transform=IdentityTransform(), name="qux")) - - test_data = [ - {"foo": {"bar": {"baz": "data-1"}}}, - {"foo": {"bar": {"baz": "data-2"}}}, - {"foo": {"bar": {"baz": "data-1"}}}, - ] - - arrow_table = pa.Table.from_pylist(test_data, schema=schema.as_arrow()) - partitions = _determine_partitions(spec, schema, arrow_table) - - assert len(partitions) == 2 # 2 unique partitions - partition_values = {p.partition_key.partition[0] for p in partitions} - assert partition_values == {"data-1", "data-2"} - - -def test_inspect_partition_for_nested_field(catalog: InMemoryCatalog) -> None: - schema = Schema( - NestedField(id=1, name="foo", field_type=StringType(), required=True), - NestedField( - id=2, - name="bar", - field_type=StructType( - NestedField(id=3, name="baz", field_type=StringType(), required=False), - NestedField(id=4, name="qux", field_type=IntegerType(), required=False), - ), - required=True, - ), - ) - spec = PartitionSpec(PartitionField(source_id=3, field_id=1000, transform=IdentityTransform(), name="part")) - catalog.create_namespace("default") - table = catalog.create_table("default.test_partition_in_struct", schema=schema, partition_spec=spec) - test_data = [ - {"foo": "a", "bar": {"baz": "data-a", "qux": 1}}, - {"foo": "b", "bar": {"baz": "data-b", "qux": 2}}, - ] - - arrow_table = pa.Table.from_pylist(test_data, schema=table.schema().as_arrow()) - table.append(arrow_table) - partitions_table = table.inspect.partitions() - partitions = partitions_table["partition"].to_pylist() - - assert len(partitions) == 2 - assert {part["part"] for part in partitions} == {"data-a", "data-b"} - - def test_identity_partition_on_multi_columns() -> None: test_pa_schema = pa.schema([("born_year", pa.int64()), ("n_legs", pa.int64()), ("animal", pa.string())]) test_schema = Schema( @@ -2479,7 +2172,7 @@ def test_identity_partition_on_multi_columns() -> None: (None, 4, "Kirin"), (2021, None, "Fish"), ] * 2 - expected = {Record(test_rows[i][1], test_rows[i][0]) for i in range(len(test_rows))} + expected = {Record(n_legs_identity=test_rows[i][1], year_identity=test_rows[i][0]) for i in range(len(test_rows))} partition_spec = PartitionSpec( PartitionField(source_id=2, field_id=1002, transform=IdentityTransform(), name="n_legs_identity"), PartitionField(source_id=1, field_id=1001, transform=IdentityTransform(), name="year_identity"), @@ -2510,17 +2203,6 @@ def test_identity_partition_on_multi_columns() -> None: ) == arrow_table.sort_by([("born_year", "ascending"), ("n_legs", "ascending"), ("animal", "ascending")]) -def test_initial_value() -> None: - # Have some fake data, otherwise it will generate a 
table without records - data = pa.record_batch([pa.nulls(10, pa.int64())], names=["some_field"]) - result = _to_requested_schema( - Schema(NestedField(1, "we-love-22", LongType(), required=True, initial_default=22)), Schema(), data - ) - assert result.column_names == ["we-love-22"] - for val in result[0]: - assert val.as_py() == 22 - - def test__to_requested_schema_timestamps( arrow_table_schema_with_all_timestamp_precisions: pa.Schema, arrow_table_with_all_timestamp_precisions: pa.Table, @@ -2603,14 +2285,14 @@ def _s3_region_map(bucket: str) -> str: raise OSError("Unknown bucket") # For a pyarrow io instance with configured default s3 region - pyarrow_file_io = PyArrowFileIO({"s3.region": user_provided_region, "s3.resolve-region": "true"}) + pyarrow_file_io = PyArrowFileIO({"s3.region": user_provided_region}) with patch("pyarrow.fs.resolve_s3_region") as mock_s3_region_resolver: mock_s3_region_resolver.side_effect = _s3_region_map # The region is set to provided region if bucket region cannot be resolved with caplog.at_level(logging.WARNING): assert pyarrow_file_io.new_input("s3://non-exist-bucket/path/to/file")._filesystem.region == user_provided_region - assert "Unable to resolve region for bucket non-exist-bucket" in caplog.text + assert f"Unable to resolve region for bucket non-exist-bucket, using default region {user_provided_region}" in caplog.text for bucket_region in bucket_regions: # For s3 scheme, region is overwritten by resolved bucket region if different from user provided region @@ -2636,21 +2318,3 @@ def test_pyarrow_io_multi_fs() -> None: # Same PyArrowFileIO instance resolves local file input to LocalFileSystem assert isinstance(pyarrow_file_io.new_input("file:///path/to/file")._filesystem, LocalFileSystem) - - -class SomeRetryStrategy(AwsDefaultS3RetryStrategy): - def __init__(self) -> None: - super().__init__() - warnings.warn("Initialized SomeRetryStrategy 👍") - - -def test_retry_strategy() -> None: - io = PyArrowFileIO(properties={S3_RETRY_STRATEGY_IMPL: "tests.io.test_pyarrow.SomeRetryStrategy"}) - with pytest.warns(UserWarning, match="Initialized SomeRetryStrategy.*"): - io.new_input("s3://bucket/path/to/file") - - -def test_retry_strategy_not_found() -> None: - io = PyArrowFileIO(properties={S3_RETRY_STRATEGY_IMPL: "pyiceberg.DoesNotExist"}) - with pytest.warns(UserWarning, match="Could not initialize S3 retry strategy: pyiceberg.DoesNotExist"): - io.new_input("s3://bucket/path/to/file") diff --git a/tests/io/test_pyarrow_stats.py b/tests/io/test_pyarrow_stats.py index 7a4d47317a..788891711e 100644 --- a/tests/io/test_pyarrow_stats.py +++ b/tests/io/test_pyarrow_stats.py @@ -27,7 +27,6 @@ timedelta, timezone, ) -from decimal import Decimal from typing import ( Any, Dict, @@ -195,7 +194,7 @@ def test_record_count() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert datafile.record_count == 4 @@ -208,7 +207,7 @@ def test_value_counts() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert len(datafile.value_counts) == 7 assert datafile.value_counts[1] == 4 @@ -229,7 +228,7 @@ def test_column_sizes() -> None: 
stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert len(datafile.column_sizes) == 7 # these values are an artifact of how the write_table encodes the columns @@ -249,7 +248,7 @@ def test_null_and_nan_counts() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert len(datafile.null_value_counts) == 7 assert datafile.null_value_counts[1] == 1 @@ -276,7 +275,7 @@ def test_bounds() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert len(datafile.lower_bounds) == 2 assert datafile.lower_bounds[1].decode() == "aaaaaaaaaaaaaaaa" @@ -320,7 +319,7 @@ def test_metrics_mode_none() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert len(datafile.value_counts) == 0 assert len(datafile.null_value_counts) == 0 @@ -339,7 +338,7 @@ def test_metrics_mode_counts() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert len(datafile.value_counts) == 7 assert len(datafile.null_value_counts) == 7 @@ -358,7 +357,7 @@ def test_metrics_mode_full() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert len(datafile.value_counts) == 7 assert len(datafile.null_value_counts) == 7 @@ -383,7 +382,7 @@ def test_metrics_mode_non_default_trunc() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert len(datafile.value_counts) == 7 assert len(datafile.null_value_counts) == 7 @@ -409,7 +408,7 @@ def test_column_metrics_mode() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert len(datafile.value_counts) == 6 assert len(datafile.null_value_counts) == 6 @@ -447,9 +446,6 @@ def construct_test_table_primitive_types() -> Tuple[pq.FileMetaData, Union[Table {"id": 10, "name": "strings", "required": False, "type": "string"}, {"id": 11, "name": "uuids", "required": False, "type": "uuid"}, {"id": 12, "name": "binaries", "required": False, "type": "binary"}, - {"id": 13, "name": "decimal8", "required": 
False, "type": "decimal(5, 2)"}, - {"id": 14, "name": "decimal16", "required": False, "type": "decimal(16, 6)"}, - {"id": 15, "name": "decimal32", "required": False, "type": "decimal(19, 6)"}, ], }, ], @@ -474,9 +470,6 @@ def construct_test_table_primitive_types() -> Tuple[pq.FileMetaData, Union[Table strings = ["hello", "world"] uuids = [uuid.uuid3(uuid.NAMESPACE_DNS, "foo").bytes, uuid.uuid3(uuid.NAMESPACE_DNS, "bar").bytes] binaries = [b"hello", b"world"] - decimal8 = pa.array([Decimal("123.45"), Decimal("678.91")], pa.decimal128(8, 2)) - decimal16 = pa.array([Decimal("12345679.123456"), Decimal("67891234.678912")], pa.decimal128(16, 6)) - decimal32 = pa.array([Decimal("1234567890123.123456"), Decimal("9876543210703.654321")], pa.decimal128(19, 6)) table = pa.Table.from_pydict( { @@ -492,9 +485,6 @@ def construct_test_table_primitive_types() -> Tuple[pq.FileMetaData, Union[Table "strings": strings, "uuids": uuids, "binaries": binaries, - "decimal8": decimal8, - "decimal16": decimal16, - "decimal32": decimal32, }, schema=arrow_schema, ) @@ -502,7 +492,7 @@ def construct_test_table_primitive_types() -> Tuple[pq.FileMetaData, Union[Table metadata_collector: List[Any] = [] with pa.BufferOutputStream() as f: - with pq.ParquetWriter(f, table.schema, metadata_collector=metadata_collector, store_decimal_as_integer=True) as writer: + with pq.ParquetWriter(f, table.schema, metadata_collector=metadata_collector) as writer: writer.write_table(table) return metadata_collector[0], table_metadata @@ -518,15 +508,15 @@ def test_metrics_primitive_types() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) - assert len(datafile.value_counts) == 15 - assert len(datafile.null_value_counts) == 15 + assert len(datafile.value_counts) == 12 + assert len(datafile.null_value_counts) == 12 assert len(datafile.nan_value_counts) == 0 tz = timezone(timedelta(seconds=19800)) - assert len(datafile.lower_bounds) == 15 + assert len(datafile.lower_bounds) == 12 assert datafile.lower_bounds[1] == STRUCT_BOOL.pack(False) assert datafile.lower_bounds[2] == STRUCT_INT32.pack(23) assert datafile.lower_bounds[3] == STRUCT_INT64.pack(2) @@ -539,11 +529,8 @@ def test_metrics_primitive_types() -> None: assert datafile.lower_bounds[10] == b"he" assert datafile.lower_bounds[11] == uuid.uuid3(uuid.NAMESPACE_DNS, "foo").bytes assert datafile.lower_bounds[12] == b"he" - assert datafile.lower_bounds[13][::-1].ljust(4, b"\x00") == STRUCT_INT32.pack(12345) - assert datafile.lower_bounds[14][::-1].ljust(8, b"\x00") == STRUCT_INT64.pack(12345679123456) - assert str(int.from_bytes(datafile.lower_bounds[15], byteorder="big", signed=True)).encode("utf-8") == b"1234567890123123456" - assert len(datafile.upper_bounds) == 15 + assert len(datafile.upper_bounds) == 12 assert datafile.upper_bounds[1] == STRUCT_BOOL.pack(True) assert datafile.upper_bounds[2] == STRUCT_INT32.pack(89) assert datafile.upper_bounds[3] == STRUCT_INT64.pack(54) @@ -556,9 +543,6 @@ def test_metrics_primitive_types() -> None: assert datafile.upper_bounds[10] == b"wp" assert datafile.upper_bounds[11] == uuid.uuid3(uuid.NAMESPACE_DNS, "bar").bytes assert datafile.upper_bounds[12] == b"wp" - assert datafile.upper_bounds[13][::-1].ljust(4, b"\x00") == STRUCT_INT32.pack(67891) - assert datafile.upper_bounds[14][::-1].ljust(8, b"\x00") == 
STRUCT_INT64.pack(67891234678912) - assert str(int.from_bytes(datafile.upper_bounds[15], byteorder="big", signed=True)).encode("utf-8") == b"9876543210703654321" def construct_test_table_invalid_upper_bound() -> Tuple[pq.FileMetaData, Union[TableMetadataV1, TableMetadataV2]]: @@ -622,7 +606,7 @@ def test_metrics_invalid_upper_bound() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert len(datafile.value_counts) == 4 assert len(datafile.null_value_counts) == 4 @@ -648,7 +632,7 @@ def test_offsets() -> None: stats_columns=compute_statistics_plan(schema, table_metadata.properties), parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) assert datafile.split_offsets is not None assert len(datafile.split_offsets) == 1 @@ -723,7 +707,7 @@ def test_read_missing_statistics() -> None: parquet_column_mapping=parquet_path_to_id_mapping(schema), ) - datafile = DataFile.from_args(**statistics.to_serialized_dict()) + datafile = DataFile(**statistics.to_serialized_dict()) # expect only "strings" column values to be reflected in the # upper_bound, lower_bound and null_value_counts props of datafile diff --git a/tests/io/test_pyarrow_visitor.py b/tests/io/test_pyarrow_visitor.py index 9d5772d01c..d13822f5ce 100644 --- a/tests/io/test_pyarrow_visitor.py +++ b/tests/io/test_pyarrow_visitor.py @@ -225,18 +225,18 @@ def test_pyarrow_timestamp_tz_invalid_tz() -> None: visit_pyarrow(pyarrow_type, _ConvertToIceberg()) -@pytest.mark.parametrize("pyarrow_type", [pa.string(), pa.large_string(), pa.string_view()]) -def test_pyarrow_string_to_iceberg(pyarrow_type: pa.DataType) -> None: +def test_pyarrow_string_to_iceberg() -> None: + pyarrow_type = pa.large_string() converted_iceberg_type = visit_pyarrow(pyarrow_type, _ConvertToIceberg()) assert converted_iceberg_type == StringType() - assert visit(converted_iceberg_type, _ConvertToArrowSchema()) == pa.large_string() + assert visit(converted_iceberg_type, _ConvertToArrowSchema()) == pyarrow_type -@pytest.mark.parametrize("pyarrow_type", [pa.binary(), pa.large_binary(), pa.binary_view()]) -def test_pyarrow_variable_binary_to_iceberg(pyarrow_type: pa.DataType) -> None: +def test_pyarrow_variable_binary_to_iceberg() -> None: + pyarrow_type = pa.large_binary() converted_iceberg_type = visit_pyarrow(pyarrow_type, _ConvertToIceberg()) assert converted_iceberg_type == BinaryType() - assert visit(converted_iceberg_type, _ConvertToArrowSchema()) == pa.large_binary() + assert visit(converted_iceberg_type, _ConvertToArrowSchema()) == pyarrow_type def test_pyarrow_struct_to_iceberg() -> None: @@ -836,5 +836,5 @@ def test_expression_to_complementary_pyarrow( # Notice an isNan predicate on a str column is automatically converted to always false and removed from Or and thus will not appear in the pc.expr. 
assert ( repr(result) - == """ 100)) or ((is_nan(float_field) and (double_field == 0)) or (float_field > 100))) and invert(is_null(double_field, {nan_is_null=false})))) or is_null(float_field, {nan_is_null=false})) or is_null(string_field, {nan_is_null=false})) or is_nan(double_field))>""" + == """ 100)) or (is_nan(float_field) and (double_field == 0))) or (float_field > 100)) and invert(is_null(double_field, {nan_is_null=false})))) or is_null(float_field, {nan_is_null=false})) or is_null(string_field, {nan_is_null=false})) or is_nan(double_field))>""" ) diff --git a/tests/table/bitmaps/64map32bitvals.bin b/tests/table/bitmaps/64map32bitvals.bin deleted file mode 100644 index 475b894417e44cff61d8810057fc1530cef05718..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 48 ocmZQ%KmaQP1_nkjmy9 diff --git a/tests/table/bitmaps/64maphighvals.bin b/tests/table/bitmaps/64maphighvals.bin deleted file mode 100644 index d4312b8d22713991026a36d5d1293cf1960d89ed..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1086 zcmd;PfPnY=_rj5t0RsagP#7Y>#UKD@!S*SERgUMnOxf@r{zi~~v PF5QrBO1Grj(uH&%!J7vn diff --git a/tests/table/test_datafusion.py b/tests/table/test_datafusion.py deleted file mode 100644 index d9fa3e1e7b..0000000000 --- a/tests/table/test_datafusion.py +++ /dev/null @@ -1,64 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- - -from pathlib import Path - -import pyarrow as pa -import pytest -from datafusion import SessionContext - -from pyiceberg.catalog import Catalog, load_catalog - - -@pytest.fixture(scope="session") -def warehouse(tmp_path_factory: pytest.TempPathFactory) -> Path: - return tmp_path_factory.mktemp("warehouse") - - -@pytest.fixture(scope="session") -def catalog(warehouse: Path) -> Catalog: - catalog = load_catalog( - "default", - uri=f"sqlite:///{warehouse}/pyiceberg_catalog.db", - warehouse=f"file://{warehouse}", - ) - return catalog - - -def test_datafusion_register_pyiceberg_table(catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - catalog.create_namespace_if_not_exists("default") - iceberg_table = catalog.create_table_if_not_exists( - "default.dataset", - schema=arrow_table_with_null.schema, - ) - iceberg_table.append(arrow_table_with_null) - - ctx = SessionContext() - ctx.register_table_provider("test", iceberg_table) - - datafusion_table = ctx.table("test") - assert datafusion_table is not None - - assert datafusion_table.to_arrow_table().to_pylist() == iceberg_table.scan().to_arrow().to_pylist() - - from pandas.testing import assert_frame_equal - - assert_frame_equal( - datafusion_table.to_arrow_table().to_pandas(), - iceberg_table.scan().to_arrow().to_pandas(), - ) diff --git a/tests/table/test_expire_snapshots.py b/tests/table/test_expire_snapshots.py deleted file mode 100644 index e2b2d47b67..0000000000 --- a/tests/table/test_expire_snapshots.py +++ /dev/null @@ -1,225 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-import datetime -from unittest.mock import MagicMock -from uuid import uuid4 - -import pytest - -from pyiceberg.table import CommitTableResponse, Table - - -def test_cannot_expire_protected_head_snapshot(table_v2: Table) -> None: - """Test that a HEAD (branch) snapshot cannot be expired.""" - HEAD_SNAPSHOT = 3051729675574597004 - KEEP_SNAPSHOT = 3055729675574597004 - - # Mock the catalog's commit_table method - table_v2.catalog = MagicMock() - # Simulate refs protecting HEAD_SNAPSHOT as a branch - table_v2.metadata = table_v2.metadata.model_copy( - update={ - "refs": { - "main": MagicMock(snapshot_id=HEAD_SNAPSHOT, snapshot_ref_type="branch"), - "tag1": MagicMock(snapshot_id=KEEP_SNAPSHOT, snapshot_ref_type="tag"), - } - } - ) - # Assert fixture data - assert any(ref.snapshot_id == HEAD_SNAPSHOT for ref in table_v2.metadata.refs.values()) - - # Attempt to expire the HEAD snapshot and expect a ValueError - with pytest.raises(ValueError, match=f"Snapshot with ID {HEAD_SNAPSHOT} is protected and cannot be expired."): - table_v2.maintenance.expire_snapshots().by_id(HEAD_SNAPSHOT).commit() - - table_v2.catalog.commit_table.assert_not_called() - - -def test_cannot_expire_tagged_snapshot(table_v2: Table) -> None: - """Test that a tagged snapshot cannot be expired.""" - TAGGED_SNAPSHOT = 3051729675574597004 - KEEP_SNAPSHOT = 3055729675574597004 - - table_v2.catalog = MagicMock() - # Simulate refs protecting TAGGED_SNAPSHOT as a tag - table_v2.metadata = table_v2.metadata.model_copy( - update={ - "refs": { - "tag1": MagicMock(snapshot_id=TAGGED_SNAPSHOT, snapshot_ref_type="tag"), - "main": MagicMock(snapshot_id=KEEP_SNAPSHOT, snapshot_ref_type="branch"), - } - } - ) - assert any(ref.snapshot_id == TAGGED_SNAPSHOT for ref in table_v2.metadata.refs.values()) - - with pytest.raises(ValueError, match=f"Snapshot with ID {TAGGED_SNAPSHOT} is protected and cannot be expired."): - table_v2.maintenance.expire_snapshots().by_id(TAGGED_SNAPSHOT).commit() - - table_v2.catalog.commit_table.assert_not_called() - - -def test_expire_unprotected_snapshot(table_v2: Table) -> None: - """Test that an unprotected snapshot can be expired.""" - EXPIRE_SNAPSHOT = 3051729675574597004 - KEEP_SNAPSHOT = 3055729675574597004 - - mock_response = CommitTableResponse( - metadata=table_v2.metadata.model_copy(update={"snapshots": [KEEP_SNAPSHOT]}), - metadata_location="mock://metadata/location", - uuid=uuid4(), - ) - table_v2.catalog = MagicMock() - table_v2.catalog.commit_table.return_value = mock_response - - # Remove any refs that protect the snapshot to be expired - table_v2.metadata = table_v2.metadata.model_copy( - update={ - "refs": { - "main": MagicMock(snapshot_id=KEEP_SNAPSHOT, snapshot_ref_type="branch"), - "tag1": MagicMock(snapshot_id=KEEP_SNAPSHOT, snapshot_ref_type="tag"), - } - } - ) - - # Assert fixture data - assert all(ref.snapshot_id != EXPIRE_SNAPSHOT for ref in table_v2.metadata.refs.values()) - - # Expire the snapshot - table_v2.maintenance.expire_snapshots().by_id(EXPIRE_SNAPSHOT).commit() - - table_v2.catalog.commit_table.assert_called_once() - remaining_snapshots = table_v2.metadata.snapshots - assert EXPIRE_SNAPSHOT not in remaining_snapshots - assert len(table_v2.metadata.snapshots) == 1 - - -def test_expire_nonexistent_snapshot_raises(table_v2: Table) -> None: - """Test that trying to expire a non-existent snapshot raises an error.""" - NONEXISTENT_SNAPSHOT = 9999999999999999999 - - table_v2.catalog = MagicMock() - table_v2.metadata = table_v2.metadata.model_copy(update={"refs": {}}) - - with 
pytest.raises(ValueError, match=f"Snapshot with ID {NONEXISTENT_SNAPSHOT} does not exist."): - table_v2.maintenance.expire_snapshots().by_id(NONEXISTENT_SNAPSHOT).commit() - - table_v2.catalog.commit_table.assert_not_called() - - -def test_expire_snapshots_by_timestamp_skips_protected(table_v2: Table) -> None: - # Setup: two snapshots; both are old, but one is head/tag protected - HEAD_SNAPSHOT = 3051729675574597004 - TAGGED_SNAPSHOT = 3055729675574597004 - - # Add snapshots to metadata for timestamp/protected test - from types import SimpleNamespace - - table_v2.metadata = table_v2.metadata.model_copy( - update={ - "refs": { - "main": MagicMock(snapshot_id=HEAD_SNAPSHOT, snapshot_ref_type="branch"), - "mytag": MagicMock(snapshot_id=TAGGED_SNAPSHOT, snapshot_ref_type="tag"), - }, - "snapshots": [ - SimpleNamespace(snapshot_id=HEAD_SNAPSHOT, timestamp_ms=1, parent_snapshot_id=None), - SimpleNamespace(snapshot_id=TAGGED_SNAPSHOT, timestamp_ms=1, parent_snapshot_id=None), - ], - } - ) - table_v2.catalog = MagicMock() - - # Attempt to expire all snapshots before a future timestamp (so both are candidates) - future_datetime = datetime.datetime.now() + datetime.timedelta(days=1) - - # Mock the catalog's commit_table to return the current metadata (simulate no change) - mock_response = CommitTableResponse( - metadata=table_v2.metadata, # protected snapshots remain - metadata_location="mock://metadata/location", - uuid=uuid4(), - ) - table_v2.catalog.commit_table.return_value = mock_response - - table_v2.maintenance.expire_snapshots().older_than(future_datetime).commit() - # Update metadata to reflect the commit (as in other tests) - table_v2.metadata = mock_response.metadata - - # Both protected snapshots should remain - remaining_ids = {s.snapshot_id for s in table_v2.metadata.snapshots} - assert HEAD_SNAPSHOT in remaining_ids - assert TAGGED_SNAPSHOT in remaining_ids - - # No snapshots should have been expired (commit_table called, but with empty snapshot_ids) - args, kwargs = table_v2.catalog.commit_table.call_args - updates = args[2] if len(args) > 2 else () - # Find RemoveSnapshotsUpdate in updates - remove_update = next((u for u in updates if getattr(u, "action", None) == "remove-snapshots"), None) - assert remove_update is not None - assert remove_update.snapshot_ids == [] - - -def test_expire_snapshots_by_ids(table_v2: Table) -> None: - """Test that multiple unprotected snapshots can be expired by IDs.""" - EXPIRE_SNAPSHOT_1 = 3051729675574597004 - EXPIRE_SNAPSHOT_2 = 3051729675574597005 - KEEP_SNAPSHOT = 3055729675574597004 - - mock_response = CommitTableResponse( - metadata=table_v2.metadata.model_copy(update={"snapshots": [KEEP_SNAPSHOT]}), - metadata_location="mock://metadata/location", - uuid=uuid4(), - ) - table_v2.catalog = MagicMock() - table_v2.catalog.commit_table.return_value = mock_response - - # Remove any refs that protect the snapshots to be expired - table_v2.metadata = table_v2.metadata.model_copy( - update={ - "refs": { - "main": MagicMock(snapshot_id=KEEP_SNAPSHOT, snapshot_ref_type="branch"), - "tag1": MagicMock(snapshot_id=KEEP_SNAPSHOT, snapshot_ref_type="tag"), - } - } - ) - - # Add snapshots to metadata for multi-id test - from types import SimpleNamespace - - table_v2.metadata = table_v2.metadata.model_copy( - update={ - "refs": { - "main": MagicMock(snapshot_id=KEEP_SNAPSHOT, snapshot_ref_type="branch"), - "tag1": MagicMock(snapshot_id=KEEP_SNAPSHOT, snapshot_ref_type="tag"), - }, - "snapshots": [ - SimpleNamespace(snapshot_id=EXPIRE_SNAPSHOT_1, timestamp_ms=1, 
parent_snapshot_id=None), - SimpleNamespace(snapshot_id=EXPIRE_SNAPSHOT_2, timestamp_ms=1, parent_snapshot_id=None), - SimpleNamespace(snapshot_id=KEEP_SNAPSHOT, timestamp_ms=2, parent_snapshot_id=None), - ], - } - ) - - # Assert fixture data - assert all(ref.snapshot_id not in (EXPIRE_SNAPSHOT_1, EXPIRE_SNAPSHOT_2) for ref in table_v2.metadata.refs.values()) - - # Expire the snapshots - table_v2.maintenance.expire_snapshots().by_ids([EXPIRE_SNAPSHOT_1, EXPIRE_SNAPSHOT_2]).commit() - - table_v2.catalog.commit_table.assert_called_once() - remaining_snapshots = table_v2.metadata.snapshots - assert EXPIRE_SNAPSHOT_1 not in remaining_snapshots - assert EXPIRE_SNAPSHOT_2 not in remaining_snapshots - assert len(table_v2.metadata.snapshots) == 1 diff --git a/tests/table/test_init.py b/tests/table/test_init.py index 748a77eee0..69bbab527e 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -50,13 +50,14 @@ _match_deletes_to_data_file, ) from pyiceberg.table.metadata import INITIAL_SEQUENCE_NUMBER, TableMetadataUtil, TableMetadataV2, _generate_snapshot_id -from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRef, SnapshotRefType +from pyiceberg.table.refs import SnapshotRef from pyiceberg.table.snapshots import ( MetadataLogEntry, Operation, Snapshot, SnapshotLogEntry, Summary, + ancestors_of, ) from pyiceberg.table.sorting import ( NullOrder, @@ -64,7 +65,7 @@ SortField, SortOrder, ) -from pyiceberg.table.statistics import BlobMetadata, PartitionStatisticsFile, StatisticsFile +from pyiceberg.table.statistics import BlobMetadata, StatisticsFile from pyiceberg.table.update import ( AddSnapshotUpdate, AddSortOrderUpdate, @@ -76,13 +77,11 @@ AssertLastAssignedPartitionId, AssertRefSnapshotId, AssertTableUUID, - RemovePartitionStatisticsUpdate, RemovePropertiesUpdate, RemoveSnapshotRefUpdate, RemoveSnapshotsUpdate, RemoveStatisticsUpdate, SetDefaultSortOrderUpdate, - SetPartitionStatisticsUpdate, SetPropertiesUpdate, SetSnapshotRefUpdate, SetStatisticsUpdate, @@ -226,6 +225,44 @@ def test_snapshot_by_timestamp(table_v2: Table) -> None: assert table_v2.snapshot_as_of_timestamp(1515100955770, inclusive=False) is None +def test_ancestors_of(table_v2: Table) -> None: + assert list(ancestors_of(table_v2.current_snapshot(), table_v2.metadata)) == [ + Snapshot( + snapshot_id=3055729675574597004, + parent_snapshot_id=3051729675574597004, + sequence_number=1, + timestamp_ms=1555100955770, + manifest_list="s3://a/b/2.avro", + summary=Summary(Operation.APPEND), + schema_id=1, + ), + Snapshot( + snapshot_id=3051729675574597004, + parent_snapshot_id=None, + sequence_number=0, + timestamp_ms=1515100955770, + manifest_list="s3://a/b/1.avro", + summary=Summary(Operation.APPEND), + schema_id=None, + ), + ] + + +def test_ancestors_of_recursive_error(table_v2_with_extensive_snapshots: Table) -> None: + # Test RecursionError: maximum recursion depth exceeded + assert ( + len( + list( + ancestors_of( + table_v2_with_extensive_snapshots.current_snapshot(), + table_v2_with_extensive_snapshots.metadata, + ) + ) + ) + == 2000 + ) + + def test_snapshot_by_id_does_not_exist(table_v2: Table) -> None: assert table_v2.snapshot_by_id(-1) is None @@ -346,22 +383,16 @@ def test_static_table_gz_same_as_table(table_v2: Table, metadata_location_gz: st assert static_table.metadata == table_v2.metadata -def test_static_table_version_hint_same_as_table(table_v2: Table, table_location: str) -> None: - static_table = StaticTable.from_metadata(table_location) - assert isinstance(static_table, Table) - assert 
static_table.metadata == table_v2.metadata - - def test_static_table_io_does_not_exist(metadata_location: str) -> None: with pytest.raises(ValueError): StaticTable.from_metadata(metadata_location, {PY_IO_IMPL: "pyiceberg.does.not.exist.FileIO"}) def test_match_deletes_to_datafile() -> None: - data_entry = ManifestEntry.from_args( + data_entry = ManifestEntry( status=ManifestEntryStatus.ADDED, sequence_number=1, - data_file=DataFile.from_args( + data_file=DataFile( content=DataFileContent.DATA, file_path="s3://bucket/0000.parquet", file_format=FileFormat.PARQUET, @@ -370,10 +401,10 @@ def test_match_deletes_to_datafile() -> None: file_size_in_bytes=3, ), ) - delete_entry_1 = ManifestEntry.from_args( + delete_entry_1 = ManifestEntry( status=ManifestEntryStatus.ADDED, sequence_number=0, # Older than the data - data_file=DataFile.from_args( + data_file=DataFile( content=DataFileContent.POSITION_DELETES, file_path="s3://bucket/0001-delete.parquet", file_format=FileFormat.PARQUET, @@ -382,10 +413,10 @@ def test_match_deletes_to_datafile() -> None: file_size_in_bytes=3, ), ) - delete_entry_2 = ManifestEntry.from_args( + delete_entry_2 = ManifestEntry( status=ManifestEntryStatus.ADDED, sequence_number=3, - data_file=DataFile.from_args( + data_file=DataFile( content=DataFileContent.POSITION_DELETES, file_path="s3://bucket/0002-delete.parquet", file_format=FileFormat.PARQUET, @@ -409,10 +440,10 @@ def test_match_deletes_to_datafile() -> None: def test_match_deletes_to_datafile_duplicate_number() -> None: - data_entry = ManifestEntry.from_args( + data_entry = ManifestEntry( status=ManifestEntryStatus.ADDED, sequence_number=1, - data_file=DataFile.from_args( + data_file=DataFile( content=DataFileContent.DATA, file_path="s3://bucket/0000.parquet", file_format=FileFormat.PARQUET, @@ -421,10 +452,10 @@ def test_match_deletes_to_datafile_duplicate_number() -> None: file_size_in_bytes=3, ), ) - delete_entry_1 = ManifestEntry.from_args( + delete_entry_1 = ManifestEntry( status=ManifestEntryStatus.ADDED, sequence_number=3, - data_file=DataFile.from_args( + data_file=DataFile( content=DataFileContent.POSITION_DELETES, file_path="s3://bucket/0001-delete.parquet", file_format=FileFormat.PARQUET, @@ -439,10 +470,10 @@ def test_match_deletes_to_datafile_duplicate_number() -> None: upper_bounds={}, ), ) - delete_entry_2 = ManifestEntry.from_args( + delete_entry_2 = ManifestEntry( status=ManifestEntryStatus.ADDED, sequence_number=3, - data_file=DataFile.from_args( + data_file=DataFile( content=DataFileContent.POSITION_DELETES, file_path="s3://bucket/0002-delete.parquet", file_format=FileFormat.PARQUET, @@ -513,15 +544,15 @@ def test_update_column(table_v1: Table, table_v2: Table) -> None: assert new_schema3.find_field("z").required is False, "failed to update existing field required" # assert the above two updates also works with union_by_name - assert table.update_schema().union_by_name(new_schema)._apply() == new_schema, ( - "failed to update existing field doc with union_by_name" - ) - assert table.update_schema().union_by_name(new_schema2)._apply() == new_schema2, ( - "failed to remove existing field doc with union_by_name" - ) - assert table.update_schema().union_by_name(new_schema3)._apply() == new_schema3, ( - "failed to update existing field required with union_by_name" - ) + assert ( + table.update_schema().union_by_name(new_schema)._apply() == new_schema + ), "failed to update existing field doc with union_by_name" + assert ( + table.update_schema().union_by_name(new_schema2)._apply() == new_schema2 + ), 
"failed to remove existing field doc with union_by_name" + assert ( + table.update_schema().union_by_name(new_schema3)._apply() == new_schema3 + ), "failed to update existing field required with union_by_name" def test_add_primitive_type_column(table_v2: Table) -> None: @@ -1002,42 +1033,28 @@ def test_assert_table_uuid(table_v2: Table) -> None: def test_assert_ref_snapshot_id(table_v2: Table) -> None: base_metadata = table_v2.metadata - AssertRefSnapshotId(ref=MAIN_BRANCH, snapshot_id=base_metadata.current_snapshot_id).validate(base_metadata) + AssertRefSnapshotId(ref="main", snapshot_id=base_metadata.current_snapshot_id).validate(base_metadata) with pytest.raises(CommitFailedException, match="Requirement failed: current table metadata is missing"): - AssertRefSnapshotId(ref=MAIN_BRANCH, snapshot_id=1).validate(None) + AssertRefSnapshotId(ref="main", snapshot_id=1).validate(None) with pytest.raises( CommitFailedException, - match=f"Requirement failed: branch {MAIN_BRANCH} was created concurrently", + match="Requirement failed: branch main was created concurrently", ): - AssertRefSnapshotId(ref=MAIN_BRANCH, snapshot_id=None).validate(base_metadata) + AssertRefSnapshotId(ref="main", snapshot_id=None).validate(base_metadata) with pytest.raises( CommitFailedException, - match=f"Requirement failed: branch {MAIN_BRANCH} has changed: expected id 1, found 3055729675574597004", + match="Requirement failed: branch main has changed: expected id 1, found 3055729675574597004", ): - AssertRefSnapshotId(ref=MAIN_BRANCH, snapshot_id=1).validate(base_metadata) - - non_existing_ref = "not_exist_branch_or_tag" - assert table_v2.refs().get("not_exist_branch_or_tag") is None + AssertRefSnapshotId(ref="main", snapshot_id=1).validate(base_metadata) with pytest.raises( CommitFailedException, - match=f"Requirement failed: branch or tag {non_existing_ref} is missing, expected 1", + match="Requirement failed: branch or tag not_exist is missing, expected 1", ): - AssertRefSnapshotId(ref=non_existing_ref, snapshot_id=1).validate(base_metadata) - - # existing Tag in metadata: test - ref_tag = table_v2.refs().get("test") - assert ref_tag is not None - assert ref_tag.snapshot_ref_type == SnapshotRefType.TAG, "TAG test should be present in table to be tested" - - with pytest.raises( - CommitFailedException, - match="Requirement failed: tag test has changed: expected id 3055729675574597004, found 3051729675574597004", - ): - AssertRefSnapshotId(ref="test", snapshot_id=3055729675574597004).validate(base_metadata) + AssertRefSnapshotId(ref="not_exist", snapshot_id=1).validate(base_metadata) def test_assert_last_assigned_field_id(table_v2: Table) -> None: @@ -1361,79 +1378,3 @@ def test_remove_statistics_update(table_v2_with_statistics: Table) -> None: table_v2_with_statistics.metadata, (RemoveStatisticsUpdate(snapshot_id=123456789),), ) - - -def test_set_partition_statistics_update(table_v2_with_statistics: Table) -> None: - snapshot_id = table_v2_with_statistics.metadata.current_snapshot_id - - partition_statistics_file = PartitionStatisticsFile( - snapshot_id=snapshot_id, - statistics_path="s3://bucket/warehouse/stats.puffin", - file_size_in_bytes=124, - ) - - update = SetPartitionStatisticsUpdate( - partition_statistics=partition_statistics_file, - ) - - new_metadata = update_table_metadata( - table_v2_with_statistics.metadata, - (update,), - ) - - expected = """ - { - "snapshot-id": 3055729675574597004, - "statistics-path": "s3://bucket/warehouse/stats.puffin", - "file-size-in-bytes": 124 - }""" - - assert 
len(new_metadata.partition_statistics) == 1 - - updated_statistics = [stat for stat in new_metadata.partition_statistics if stat.snapshot_id == snapshot_id] - - assert len(updated_statistics) == 1 - assert json.loads(updated_statistics[0].model_dump_json()) == json.loads(expected) - - -def test_remove_partition_statistics_update(table_v2_with_statistics: Table) -> None: - # Add partition statistics file. - snapshot_id = table_v2_with_statistics.metadata.current_snapshot_id - - partition_statistics_file = PartitionStatisticsFile( - snapshot_id=snapshot_id, - statistics_path="s3://bucket/warehouse/stats.puffin", - file_size_in_bytes=124, - ) - - update = SetPartitionStatisticsUpdate( - partition_statistics=partition_statistics_file, - ) - - new_metadata = update_table_metadata( - table_v2_with_statistics.metadata, - (update,), - ) - assert len(new_metadata.partition_statistics) == 1 - - # Remove the same partition statistics file. - remove_update = RemovePartitionStatisticsUpdate(snapshot_id=snapshot_id) - - remove_metadata = update_table_metadata( - new_metadata, - (remove_update,), - ) - - assert len(remove_metadata.partition_statistics) == 0 - - -def test_remove_partition_statistics_update_with_invalid_snapshot_id(table_v2_with_statistics: Table) -> None: - # Remove the same partition statistics file. - with pytest.raises( - ValueError, - match="Partition Statistics with snapshot id 123456789 does not exist", - ): - update_table_metadata( - table_v2_with_statistics.metadata, - (RemovePartitionStatisticsUpdate(snapshot_id=123456789),), - ) diff --git a/tests/table/test_locations.py b/tests/table/test_locations.py index 4efa64326a..d66bf18792 100644 --- a/tests/table/test_locations.py +++ b/tests/table/test_locations.py @@ -74,7 +74,7 @@ def test_custom_location_provider_not_found(caplog: Any) -> None: def test_object_storage_no_partition() -> None: - provider = load_location_provider(table_location="table_location", table_properties={"write.object-storage.enabled": "true"}) + provider = load_location_provider(table_location="table_location", table_properties=EMPTY_DICT) location = provider.new_data_location("test.parquet") parts = location.split("/") @@ -111,7 +111,6 @@ def test_object_storage_partitioned_paths_disabled(partition_key: Optional[Parti provider = load_location_provider( table_location="table_location", table_properties={ - "write.object-storage.enabled": "true", "write.object-storage.partitioned-paths": "false", }, ) @@ -132,7 +131,7 @@ def test_object_storage_partitioned_paths_disabled(partition_key: Optional[Parti ], ) def test_hash_injection(data_file_name: str, expected_hash: str) -> None: - provider = load_location_provider(table_location="table_location", table_properties={"write.object-storage.enabled": "true"}) + provider = load_location_provider(table_location="table_location", table_properties=EMPTY_DICT) assert provider.new_data_location(data_file_name) == f"table_location/data/{expected_hash}/{data_file_name}" @@ -140,10 +139,7 @@ def test_hash_injection(data_file_name: str, expected_hash: str) -> None: def test_object_location_provider_write_data_path() -> None: provider = load_location_provider( table_location="s3://table-location/table", - table_properties={ - "write.object-storage.enabled": "true", - TableProperties.WRITE_DATA_PATH: "s3://table-location/custom/data/path", - }, + table_properties={TableProperties.WRITE_DATA_PATH: "s3://table-location/custom/data/path"}, ) assert ( diff --git a/tests/table/test_metadata.py b/tests/table/test_metadata.py index 
9141189ec5..d2ee5c3130 100644 --- a/tests/table/test_metadata.py +++ b/tests/table/test_metadata.py @@ -48,12 +48,8 @@ LongType, MapType, NestedField, - PrimitiveType, StringType, StructType, - TimestampNanoType, - TimestamptzNanoType, - UnknownType, ) @@ -173,13 +169,13 @@ def test_updating_metadata(example_table_metadata_v2: Dict[str, Any]) -> None: def test_serialize_v1(example_table_metadata_v1: Dict[str, Any]) -> None: table_metadata = TableMetadataV1(**example_table_metadata_v1) table_metadata_json = table_metadata.model_dump_json() - expected = """{"location":"s3://bucket/test/location","table-uuid":"d20125c8-7284-442c-9aea-15fee620737c","last-updated-ms":1602638573874,"last-column-id":3,"schemas":[{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]}],"current-schema-id":0,"partition-specs":[{"spec-id":0,"fields":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}],"default-spec-id":0,"last-partition-id":1000,"properties":{},"snapshots":[{"snapshot-id":1925,"timestamp-ms":1602638573822,"manifest-list":"s3://bucket/test/manifest-list"}],"snapshot-log":[],"metadata-log":[],"sort-orders":[{"order-id":0,"fields":[]}],"default-sort-order-id":0,"refs":{},"statistics":[],"partition-statistics":[],"format-version":1,"schema":{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]},"partition-spec":[{"name":"x","transform":"identity","source-id":1,"field-id":1000}]}""" + expected = """{"location":"s3://bucket/test/location","table-uuid":"d20125c8-7284-442c-9aea-15fee620737c","last-updated-ms":1602638573874,"last-column-id":3,"schemas":[{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]}],"current-schema-id":0,"partition-specs":[{"spec-id":0,"fields":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}],"default-spec-id":0,"last-partition-id":1000,"properties":{},"snapshots":[{"snapshot-id":1925,"timestamp-ms":1602638573822,"manifest-list":"s3://bucket/test/manifest-list"}],"snapshot-log":[],"metadata-log":[],"sort-orders":[{"order-id":0,"fields":[]}],"default-sort-order-id":0,"refs":{},"statistics":[],"format-version":1,"schema":{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]},"partition-spec":[{"name":"x","transform":"identity","source-id":1,"field-id":1000}]}""" assert table_metadata_json == expected def test_serialize_v2(example_table_metadata_v2: Dict[str, Any]) -> None: table_metadata = TableMetadataV2(**example_table_metadata_v2).model_dump_json() - expected = 
"""{"location":"s3://bucket/test/location","table-uuid":"9c12d441-03fe-4693-9a96-a0705ddf69c1","last-updated-ms":1602638573590,"last-column-id":3,"schemas":[{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]},{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":1,"identifier-field-ids":[1,2]}],"current-schema-id":1,"partition-specs":[{"spec-id":0,"fields":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}],"default-spec-id":0,"last-partition-id":1000,"properties":{"read.split.target.size":"134217728"},"current-snapshot-id":3055729675574597004,"snapshots":[{"snapshot-id":3051729675574597004,"sequence-number":0,"timestamp-ms":1515100955770,"manifest-list":"s3://a/b/1.avro","summary":{"operation":"append"}},{"snapshot-id":3055729675574597004,"parent-snapshot-id":3051729675574597004,"sequence-number":1,"timestamp-ms":1555100955770,"manifest-list":"s3://a/b/2.avro","summary":{"operation":"append"},"schema-id":1}],"snapshot-log":[{"snapshot-id":3051729675574597004,"timestamp-ms":1515100955770},{"snapshot-id":3055729675574597004,"timestamp-ms":1555100955770}],"metadata-log":[{"metadata-file":"s3://bucket/.../v1.json","timestamp-ms":1515100}],"sort-orders":[{"order-id":3,"fields":[{"source-id":2,"transform":"identity","direction":"asc","null-order":"nulls-first"},{"source-id":3,"transform":"bucket[4]","direction":"desc","null-order":"nulls-last"}]}],"default-sort-order-id":3,"refs":{"test":{"snapshot-id":3051729675574597004,"type":"tag","max-ref-age-ms":10000000},"main":{"snapshot-id":3055729675574597004,"type":"branch"}},"statistics":[],"partition-statistics":[],"format-version":2,"last-sequence-number":34}""" + expected = 
"""{"location":"s3://bucket/test/location","table-uuid":"9c12d441-03fe-4693-9a96-a0705ddf69c1","last-updated-ms":1602638573590,"last-column-id":3,"schemas":[{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]},{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":1,"identifier-field-ids":[1,2]}],"current-schema-id":1,"partition-specs":[{"spec-id":0,"fields":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}],"default-spec-id":0,"last-partition-id":1000,"properties":{"read.split.target.size":"134217728"},"current-snapshot-id":3055729675574597004,"snapshots":[{"snapshot-id":3051729675574597004,"sequence-number":0,"timestamp-ms":1515100955770,"manifest-list":"s3://a/b/1.avro","summary":{"operation":"append"}},{"snapshot-id":3055729675574597004,"parent-snapshot-id":3051729675574597004,"sequence-number":1,"timestamp-ms":1555100955770,"manifest-list":"s3://a/b/2.avro","summary":{"operation":"append"},"schema-id":1}],"snapshot-log":[{"snapshot-id":3051729675574597004,"timestamp-ms":1515100955770},{"snapshot-id":3055729675574597004,"timestamp-ms":1555100955770}],"metadata-log":[{"metadata-file":"s3://bucket/.../v1.json","timestamp-ms":1515100}],"sort-orders":[{"order-id":3,"fields":[{"source-id":2,"transform":"identity","direction":"asc","null-order":"nulls-first"},{"source-id":3,"transform":"bucket[4]","direction":"desc","null-order":"nulls-last"}]}],"default-sort-order-id":3,"refs":{"test":{"snapshot-id":3051729675574597004,"type":"tag","max-ref-age-ms":10000000},"main":{"snapshot-id":3055729675574597004,"type":"branch"}},"statistics":[],"format-version":2,"last-sequence-number":34}""" assert table_metadata == expected @@ -769,110 +765,3 @@ def test_make_metadata_fresh() -> None: ) assert actual.model_dump() == expected.model_dump() - - -def test_new_table_metadata_with_v3_schema() -> None: - schema = Schema( - NestedField(field_id=10, name="foo", field_type=StringType(), required=False), - NestedField(field_id=22, name="bar", field_type=IntegerType(), required=True), - NestedField(field_id=33, name="baz", field_type=BooleanType(), required=False), - NestedField(field_id=34, name="qux", field_type=TimestampNanoType(), required=False), - NestedField(field_id=35, name="quux", field_type=TimestamptzNanoType(), required=False), - schema_id=10, - identifier_field_ids=[22], - ) - - partition_spec = PartitionSpec( - PartitionField(source_id=22, field_id=1022, transform=IdentityTransform(), name="bar"), spec_id=10 - ) - - sort_order = SortOrder( - SortField(source_id=10, transform=IdentityTransform(), direction=SortDirection.ASC, null_order=NullOrder.NULLS_LAST), - order_id=10, - ) - - actual = new_table_metadata( - schema=schema, - partition_spec=partition_spec, - sort_order=sort_order, - location="s3://some_v1_location/", - properties={"format-version": "3"}, - ) - - expected_schema = Schema( - NestedField(field_id=1, name="foo", field_type=StringType(), required=False), - NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), - NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), - NestedField(field_id=4, name="qux", field_type=TimestampNanoType(), required=False), - NestedField(field_id=5, name="quux", field_type=TimestamptzNanoType(), required=False), - schema_id=0, - identifier_field_ids=[2], - ) - - expected_spec = 
PartitionSpec(PartitionField(source_id=2, field_id=1000, transform=IdentityTransform(), name="bar")) - - expected_sort_order = SortOrder( - SortField(source_id=1, transform=IdentityTransform(), direction=SortDirection.ASC, null_order=NullOrder.NULLS_LAST), - order_id=1, - ) - - expected = TableMetadataV3( - location="s3://some_v1_location/", - table_uuid=actual.table_uuid, - last_updated_ms=actual.last_updated_ms, - last_column_id=5, - schemas=[expected_schema], - schema_=expected_schema, - current_schema_id=0, - partition_spec=[field.model_dump() for field in expected_spec.fields], - partition_specs=[expected_spec], - default_spec_id=0, - last_partition_id=1000, - properties={}, - current_snapshot_id=None, - snapshots=[], - snapshot_log=[], - metadata_log=[], - sort_orders=[expected_sort_order], - default_sort_order_id=1, - refs={}, - format_version=3, - ) - - assert actual.model_dump() == expected.model_dump() - assert actual.schemas == [expected_schema] - assert actual.partition_specs == [expected_spec] - assert actual.sort_orders == [expected_sort_order] - - -@pytest.mark.parametrize( - "field_type", - [ - TimestampNanoType(), - TimestamptzNanoType(), - UnknownType(), - ], -) -def test_new_table_metadata_format_v2_with_v3_schema_fails(field_type: PrimitiveType) -> None: - schema = Schema( - NestedField(field_id=34, name="qux", field_type=field_type, required=False), - schema_id=10, - ) - - partition_spec = PartitionSpec( - PartitionField(source_id=34, field_id=1022, transform=IdentityTransform(), name="qux"), spec_id=10 - ) - - sort_order = SortOrder( - SortField(source_id=34, transform=IdentityTransform(), direction=SortDirection.ASC, null_order=NullOrder.NULLS_LAST), - order_id=34, - ) - - with pytest.raises(ValueError, match=f"{field_type} is only supported in 3 or higher. 
Current format version is: 2"): - new_table_metadata( - schema=schema, - partition_spec=partition_spec, - sort_order=sort_order, - location="s3://some_v1_location/", - properties={"format-version": "2"}, - ) diff --git a/tests/table/test_partitioning.py b/tests/table/test_partitioning.py index 0fe22391c0..edda6d3aa8 100644 --- a/tests/table/test_partitioning.py +++ b/tests/table/test_partitioning.py @@ -158,7 +158,7 @@ def test_partition_spec_to_path() -> None: spec_id=3, ) - record = Record("my+str", "( )", 10) + record = Record(**{"my#str%bucket": "my+str", "other str+bucket": "( )", "my!int:bucket": 10}) # type: ignore # Both partition field names and values should be URL encoded, with spaces mapping to plus signs, to match the Java # behaviour: https://github.com/apache/iceberg/blob/ca3db931b0f024f0412084751ac85dd4ef2da7e7/api/src/main/java/org/apache/iceberg/PartitionSpec.java#L198-L204 @@ -186,8 +186,8 @@ def test_partition_type(table_schema_simple: Schema) -> None: (DecimalType(5, 9), Decimal(19.25)), (DateType(), datetime.date(1925, 5, 22)), (TimeType(), datetime.time(19, 25, 00)), - (TimestampType(), datetime.datetime(2022, 5, 1, 22, 1, 1)), - (TimestamptzType(), datetime.datetime(2022, 5, 1, 22, 1, 1, tzinfo=datetime.timezone.utc)), + (TimestampType(), datetime.datetime(19, 5, 1, 22, 1, 1)), + (TimestamptzType(), datetime.datetime(19, 5, 1, 22, 1, 1, tzinfo=datetime.timezone.utc)), (StringType(), "abc"), (UUIDType(), UUID("12345678-1234-5678-1234-567812345678").bytes), (FixedType(5), 'b"\x8e\xd1\x87\x01"'), @@ -208,7 +208,13 @@ def test_transform_consistency_with_pyarrow_transform(source_type: PrimitiveType ] for t in all_transforms: if t.can_transform(source_type): - assert t.transform(source_type)(value) == t.pyarrow_transform(source_type)(pa.array([value])).to_pylist()[0] + try: + assert t.transform(source_type)(value) == t.pyarrow_transform(source_type)(pa.array([value])).to_pylist()[0] + except ValueError as e: + # Skipping unsupported feature + if "FeatureUnsupported => Unsupported data type for truncate transform" in str(e): + continue + raise def test_deserialize_partition_field_v2() -> None: diff --git a/tests/table/test_puffin.py b/tests/table/test_puffin.py deleted file mode 100644 index 2140915389..0000000000 --- a/tests/table/test_puffin.py +++ /dev/null @@ -1,74 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
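The fluent expire_snapshots maintenance builder exercised by the deleted tests/table/test_expire_snapshots.py earlier in this patch is driven through Table.maintenance. A minimal usage sketch, assuming hypothetical catalog and table names and a table with committed snapshots; snapshot ids still referenced by a branch head or tag raise ValueError instead of being expired:

import datetime

from pyiceberg.catalog import load_catalog

# Hypothetical names; any catalog/table with committed snapshots works.
table = load_catalog("default").load_table("default.my_table")

# Expire one unreferenced snapshot by id (unknown ids also raise ValueError).
snapshot_id_to_expire = 123  # hypothetical id of an unprotected snapshot
table.maintenance.expire_snapshots().by_id(snapshot_id_to_expire).commit()

# Expire everything older than a cutoff; by_ids([...]) works the same way.
# Snapshots protected by refs are skipped rather than removed.
cutoff = datetime.datetime.now() - datetime.timedelta(days=7)
table.maintenance.expire_snapshots().older_than(cutoff).commit()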
-from os import path -from typing import List - -import pytest -from pyroaring import BitMap - -from pyiceberg.table.puffin import _deserialize_bitmap - - -def _open_file(file: str) -> bytes: - cur_dir = path.dirname(path.realpath(__file__)) - with open(f"{cur_dir}/bitmaps/{file}", "rb") as f: - return f.read() - - -def test_map_empty() -> None: - puffin = _open_file("64mapempty.bin") - - expected: List[BitMap] = [] - actual = _deserialize_bitmap(puffin) - - assert expected == actual - - -def test_map_bitvals() -> None: - puffin = _open_file("64map32bitvals.bin") - - expected = [BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])] - actual = _deserialize_bitmap(puffin) - - assert expected == actual - - -def test_map_spread_vals() -> None: - puffin = _open_file("64mapspreadvals.bin") - - expected = [ - BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), - BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), - BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), - BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), - BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), - BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), - BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), - BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), - BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), - BitMap([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), - ] - actual = _deserialize_bitmap(puffin) - - assert expected == actual - - -def test_map_high_vals() -> None: - puffin = _open_file("64maphighvals.bin") - - with pytest.raises(ValueError, match="Key 4022190063 is too large, max 2147483647 to maintain compatibility with Java impl"): - _ = _deserialize_bitmap(puffin) diff --git a/tests/table/test_snapshots.py b/tests/table/test_snapshots.py index d26562ad8f..b4dde217d4 100644 --- a/tests/table/test_snapshots.py +++ b/tests/table/test_snapshots.py @@ -15,23 +15,12 @@ # specific language governing permissions and limitations # under the License. 
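The import hunk below drops ancestors_of from tests/table/test_snapshots.py while the matching hunk in tests/table/test_init.py above picks it up; the helper itself walks a snapshot's parent chain out of table metadata. A minimal sketch, assuming hypothetical catalog and table names:

from pyiceberg.catalog import load_catalog
from pyiceberg.table.snapshots import ancestors_of

# Hypothetical names; any table with committed snapshots works.
table = load_catalog("default").load_table("default.my_table")

current = table.current_snapshot()
# ancestors_of yields the starting snapshot first, then each parent up the chain.
for snapshot in ancestors_of(current, table.metadata):
    print(snapshot.snapshot_id, snapshot.parent_snapshot_id, snapshot.timestamp_ms)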
# pylint:disable=redefined-outer-name,eval-used -from typing import cast - import pytest from pyiceberg.manifest import DataFile, DataFileContent, ManifestContent, ManifestFile from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.table import Table -from pyiceberg.table.snapshots import ( - Operation, - Snapshot, - SnapshotSummaryCollector, - Summary, - ancestors_between, - ancestors_of, - update_snapshot_summaries, -) +from pyiceberg.table.snapshots import Operation, Snapshot, SnapshotSummaryCollector, Summary, update_snapshot_summaries from pyiceberg.transforms import IdentityTransform from pyiceberg.typedef import Record from pyiceberg.types import ( @@ -154,7 +143,7 @@ def test_snapshot_with_properties_repr(snapshot_with_properties: Snapshot) -> No @pytest.fixture def manifest_file() -> ManifestFile: - return ManifestFile.from_args( + return ManifestFile( content=ManifestContent.DATA, manifest_length=100, added_files_count=1, @@ -171,7 +160,7 @@ def test_snapshot_summary_collector(table_schema_simple: Schema) -> None: ssc = SnapshotSummaryCollector() assert ssc.build() == {} - data_file = DataFile.from_args(content=DataFileContent.DATA, record_count=100, file_size_in_bytes=1234, partition=Record()) + data_file = DataFile(content=DataFileContent.DATA, record_count=100, file_size_in_bytes=1234, partition=Record()) ssc.add_file(data_file, schema=table_schema_simple) assert ssc.build() == { @@ -194,8 +183,8 @@ def test_snapshot_summary_collector_with_partition() -> None: NestedField(field_id=3, name="int_field", field_type=IntegerType(), required=False), ) spec = PartitionSpec(PartitionField(source_id=3, field_id=1001, transform=IdentityTransform(), name="int_field")) - data_file_1 = DataFile.from_args(content=DataFileContent.DATA, record_count=100, file_size_in_bytes=1234, partition=Record(1)) - data_file_2 = DataFile.from_args(content=DataFileContent.DATA, record_count=200, file_size_in_bytes=4321, partition=Record(2)) + data_file_1 = DataFile(content=DataFileContent.DATA, record_count=100, file_size_in_bytes=1234, partition=Record(int_field=1)) + data_file_2 = DataFile(content=DataFileContent.DATA, record_count=200, file_size_in_bytes=4321, partition=Record(int_field=2)) # When ssc.add_file(data_file=data_file_1, schema=schema, partition_spec=spec) ssc.remove_file(data_file=data_file_1, schema=schema, partition_spec=spec) @@ -224,68 +213,11 @@ def test_snapshot_summary_collector_with_partition() -> None: "added-records": "100", "deleted-records": "300", "changed-partition-count": "2", - "partition-summaries-included": "true", "partitions.int_field=1": "added-files-size=1234,removed-files-size=1234,added-data-files=1,deleted-data-files=1,added-records=100,deleted-records=100", "partitions.int_field=2": "removed-files-size=4321,deleted-data-files=1,deleted-records=200", } -@pytest.mark.integration -def test_snapshot_summary_collector_with_partition_limit_in_constructor() -> None: - # Given - partition_summary_limit = 10 - ssc = SnapshotSummaryCollector(partition_summary_limit=partition_summary_limit) - - assert ssc.build() == {} - schema = Schema( - NestedField(field_id=1, name="bool_field", field_type=BooleanType(), required=False), - NestedField(field_id=2, name="string_field", field_type=StringType(), required=False), - NestedField(field_id=3, name="int_field", field_type=IntegerType(), required=False), - ) - spec = PartitionSpec(PartitionField(source_id=3, field_id=1001, transform=IdentityTransform(), name="int_field")) - 
data_file_1 = DataFile.from_args(content=DataFileContent.DATA, record_count=100, file_size_in_bytes=1234, partition=Record(1)) - data_file_2 = DataFile.from_args(content=DataFileContent.DATA, record_count=200, file_size_in_bytes=4321, partition=Record(2)) - - # When - ssc.add_file(data_file=data_file_1, schema=schema, partition_spec=spec) - ssc.remove_file(data_file=data_file_1, schema=schema, partition_spec=spec) - ssc.remove_file(data_file=data_file_2, schema=schema, partition_spec=spec) - - # Then - assert ssc.build() == { - "added-files-size": "1234", - "removed-files-size": "5555", - "added-data-files": "1", - "deleted-data-files": "2", - "added-records": "100", - "deleted-records": "300", - "changed-partition-count": "2", - "partition-summaries-included": "true", - "partitions.int_field=1": "added-files-size=1234,removed-files-size=1234,added-data-files=1,deleted-data-files=1,added-records=100,deleted-records=100", - "partitions.int_field=2": "removed-files-size=4321,deleted-data-files=1,deleted-records=200", - } - - -@pytest.mark.integration -def test_partition_summaries_included_not_set_when_no_change() -> None: - ssc = SnapshotSummaryCollector() - # No files added, so no partition_metrics - ssc.set_partition_summary_limit(10) - result = ssc.build() - assert "partition-summaries-included" not in result - assert result == {} # Should be empty dict - - -@pytest.mark.integration -def test_partition_summaries_included_not_set_when_unpartitioned_files(table_schema_simple: Schema) -> None: - ssc = SnapshotSummaryCollector() - data_file = DataFile.from_args(content=DataFileContent.DATA, record_count=100, file_size_in_bytes=1234, partition=Record()) - ssc.add_file(data_file, schema=table_schema_simple) - ssc.set_partition_summary_limit(10) - result = ssc.build() - assert "partition-summaries-included" not in result - - def test_merge_snapshot_summaries_empty() -> None: assert update_snapshot_summaries(Summary(Operation.APPEND)) == Summary( operation=Operation.APPEND, @@ -357,6 +289,7 @@ def test_merge_snapshot_summaries_overwrite_summary() -> None: "total-position-deletes": "1", "total-records": "1", }, + truncate_full_table=True, ) expected = { @@ -366,12 +299,18 @@ def test_merge_snapshot_summaries_overwrite_summary() -> None: "added-files-size": "4", "added-position-deletes": "5", "added-records": "6", - "total-data-files": "2", - "total-delete-files": "3", - "total-records": "7", - "total-files-size": "5", - "total-position-deletes": "6", - "total-equality-deletes": "4", + "total-data-files": "1", + "total-records": "6", + "total-delete-files": "2", + "total-equality-deletes": "3", + "total-files-size": "4", + "total-position-deletes": "5", + "deleted-data-files": "1", + "removed-delete-files": "1", + "deleted-records": "1", + "removed-files-size": "1", + "removed-position-deletes": "1", + "removed-equality-deletes": "1", } assert actual.additional_properties == expected @@ -398,61 +337,7 @@ def test_invalid_type() -> None: }, ), previous_summary={"total-data-files": "abc"}, # should be a number + truncate_full_table=True, ) assert "Could not parse summary property total-data-files to an int: abc" in str(e.value) - - -def test_ancestors_of(table_v2: Table) -> None: - assert list(ancestors_of(table_v2.current_snapshot(), table_v2.metadata)) == [ - Snapshot( - snapshot_id=3055729675574597004, - parent_snapshot_id=3051729675574597004, - sequence_number=1, - timestamp_ms=1555100955770, - manifest_list="s3://a/b/2.avro", - summary=Summary(Operation.APPEND), - schema_id=1, - ), - Snapshot( - 
snapshot_id=3051729675574597004, - parent_snapshot_id=None, - sequence_number=0, - timestamp_ms=1515100955770, - manifest_list="s3://a/b/1.avro", - summary=Summary(Operation.APPEND), - schema_id=None, - ), - ] - - -def test_ancestors_of_recursive_error(table_v2_with_extensive_snapshots: Table) -> None: - # Test RecursionError: maximum recursion depth exceeded - assert ( - len( - list( - ancestors_of( - table_v2_with_extensive_snapshots.current_snapshot(), - table_v2_with_extensive_snapshots.metadata, - ) - ) - ) - == 2000 - ) - - -def test_ancestors_between(table_v2_with_extensive_snapshots: Table) -> None: - oldest_snapshot = table_v2_with_extensive_snapshots.snapshots()[0] - current_snapshot = cast(Snapshot, table_v2_with_extensive_snapshots.current_snapshot()) - assert ( - len( - list( - ancestors_between( - oldest_snapshot, - current_snapshot, - table_v2_with_extensive_snapshots.metadata, - ) - ) - ) - == 2000 - ) diff --git a/tests/table/test_statistics.py b/tests/table/test_statistics.py deleted file mode 100644 index 6c91990ea3..0000000000 --- a/tests/table/test_statistics.py +++ /dev/null @@ -1,54 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
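The deleted tests/table/test_statistics.py below only covers pydantic (de)serialization of the statistics models; StatisticsFile itself remains in pyiceberg.table.statistics after the revert, only PartitionStatisticsFile goes away. A minimal round-trip sketch mirroring the removed assertions:

from pyiceberg.table.statistics import StatisticsFile

# Spec-style kebab-case JSON, as stored in table metadata.
raw = (
    '{"snapshot-id":123,"statistics-path":"s3://bucket/statistics.parquet",'
    '"file-size-in-bytes":345,"file-footer-size-in-bytes":456,'
    '"blob-metadata":[{"type":"apache-datasketches-theta-v1","snapshot-id":567,'
    '"sequence-number":22,"fields":[1,2,3],"properties":{"foo":"bar"}}]}'
)

stats = StatisticsFile.model_validate_json(raw)
assert stats.snapshot_id == 123
assert stats.blob_metadata[0].type == "apache-datasketches-theta-v1"
# Dumping back produces the same aliased JSON.
assert stats.model_dump_json() == raw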
-from pyiceberg.table.statistics import BlobMetadata, PartitionStatisticsFile, StatisticsFile - - -def test_partition_statistics_file() -> None: - partition_statistics_file_json = ( - """{"snapshot-id":123,"statistics-path":"s3://bucket/statistics.parquet","file-size-in-bytes":345}""" - ) - partition_statistics_file = PartitionStatisticsFile.model_validate_json(partition_statistics_file_json) - - assert partition_statistics_file == PartitionStatisticsFile( - snapshot_id=123, statistics_path="s3://bucket/statistics.parquet", file_size_in_bytes=345 - ) - - assert partition_statistics_file.model_dump_json() == partition_statistics_file_json - - -def test_statistics_file() -> None: - statistics_file_json = """{"snapshot-id":123,"statistics-path":"s3://bucket/statistics.parquet","file-size-in-bytes":345,"file-footer-size-in-bytes":456,"blob-metadata":[{"type":"apache-datasketches-theta-v1","snapshot-id":567,"sequence-number":22,"fields":[1,2,3],"properties":{"foo":"bar"}}]}""" - statistics_file = StatisticsFile.model_validate_json(statistics_file_json) - - assert statistics_file == StatisticsFile( - snapshot_id=123, - statistics_path="s3://bucket/statistics.parquet", - file_size_in_bytes=345, - file_footer_size_in_bytes=456, - key_metadata=None, - blob_metadata=[ - BlobMetadata( - type="apache-datasketches-theta-v1", - snapshot_id=567, - sequence_number=22, - fields=[1, 2, 3], - properties={"foo": "bar"}, - ) - ], - ) - - assert statistics_file.model_dump_json() == statistics_file_json diff --git a/tests/table/test_upsert.py b/tests/table/test_upsert.py index cc6e008b1e..0cfb0ba609 100644 --- a/tests/table/test_upsert.py +++ b/tests/table/test_upsert.py @@ -23,14 +23,9 @@ from pyiceberg.catalog import Catalog from pyiceberg.exceptions import NoSuchTableError -from pyiceberg.expressions import AlwaysTrue, And, EqualTo, Reference -from pyiceberg.expressions.literals import LongLiteral -from pyiceberg.io.pyarrow import schema_to_pyarrow from pyiceberg.schema import Schema from pyiceberg.table import UpsertResult -from pyiceberg.table.snapshots import Operation -from pyiceberg.table.upsert_util import create_match_filter -from pyiceberg.types import IntegerType, NestedField, StringType, StructType +from pyiceberg.types import IntegerType, NestedField, StringType from tests.catalog.test_base import InMemoryCatalog, Table @@ -83,7 +78,7 @@ def gen_source_dataset(start_row: int, end_row: int, composite_key: bool, add_du ) sql = f""" - with t as (SELECT unnest(range({start_row},{end_row + 1})) as order_id) + with t as (SELECT unnest(range({start_row},{end_row+1})) as order_id) SELECT t.order_id {additional_columns} , date '2021-01-01' as order_date, 'B' as order_type from t @@ -101,7 +96,7 @@ def gen_target_iceberg_table( additional_columns = ", t.order_id + 1000 as order_line_id" if composite_key else "" df = ctx.sql(f""" - with t as (SELECT unnest(range({start_row},{end_row + 1})) as order_id) + with t as (SELECT unnest(range({start_row},{end_row+1})) as order_id) SELECT t.order_id {additional_columns} , date '2021-01-01' as order_date, 'A' as order_type from t @@ -328,67 +323,6 @@ def test_upsert_with_identifier_fields(catalog: Catalog) -> None: identifier = "default.test_upsert_with_identifier_fields" _drop_table(catalog, identifier) - schema = Schema( - NestedField(1, "city", StringType(), required=True), - NestedField(2, "population", IntegerType(), required=True), - # Mark City as the identifier field, also known as the primary-key - identifier_field_ids=[1], - ) - - tbl = 
catalog.create_table(identifier, schema=schema) - - arrow_schema = pa.schema( - [ - pa.field("city", pa.string(), nullable=False), - pa.field("population", pa.int32(), nullable=False), - ] - ) - - # Write some data - df = pa.Table.from_pylist( - [ - {"city": "Amsterdam", "population": 921402}, - {"city": "San Francisco", "population": 808988}, - {"city": "Drachten", "population": 45019}, - {"city": "Paris", "population": 2103000}, - ], - schema=arrow_schema, - ) - tbl.append(df) - - df = pa.Table.from_pylist( - [ - # Will be updated, the population has been updated - {"city": "Drachten", "population": 45505}, - # New row, will be inserted - {"city": "Berlin", "population": 3432000}, - # Ignored, already exists in the table - {"city": "Paris", "population": 2103000}, - ], - schema=arrow_schema, - ) - upd = tbl.upsert(df) - - expected_operations = [Operation.APPEND, Operation.OVERWRITE, Operation.APPEND, Operation.APPEND] - - assert upd.rows_updated == 1 - assert upd.rows_inserted == 1 - - assert [snap.summary.operation for snap in tbl.snapshots() if snap.summary is not None] == expected_operations - - # This should be a no-op - upd = tbl.upsert(df) - - assert upd.rows_updated == 0 - assert upd.rows_inserted == 0 - - assert [snap.summary.operation for snap in tbl.snapshots() if snap.summary is not None] == expected_operations - - -def test_upsert_into_empty_table(catalog: Catalog) -> None: - identifier = "default.test_upsert_into_empty_table" - _drop_table(catalog, identifier) - schema = Schema( NestedField(1, "city", StringType(), required=True), NestedField(2, "inhabitants", IntegerType(), required=True), @@ -415,358 +349,20 @@ def test_upsert_into_empty_table(catalog: Catalog) -> None: ], schema=arrow_schema, ) - upd = tbl.upsert(df) - - assert upd.rows_updated == 0 - assert upd.rows_inserted == 4 - - -def test_create_match_filter_single_condition() -> None: - """ - Test create_match_filter with a composite key where the source yields exactly one unique key. - Expected: The function returns the single And condition directly. 
- """ - - data = [ - {"order_id": 101, "order_line_id": 1, "extra": "x"}, - {"order_id": 101, "order_line_id": 1, "extra": "x"}, # duplicate - ] - schema = pa.schema([pa.field("order_id", pa.int32()), pa.field("order_line_id", pa.int32()), pa.field("extra", pa.string())]) - table = pa.Table.from_pylist(data, schema=schema) - expr = create_match_filter(table, ["order_id", "order_line_id"]) - assert expr == And( - EqualTo(term=Reference(name="order_id"), literal=LongLiteral(101)), - EqualTo(term=Reference(name="order_line_id"), literal=LongLiteral(1)), - ) - - -def test_upsert_with_duplicate_rows_in_table(catalog: Catalog) -> None: - identifier = "default.test_upsert_with_duplicate_rows_in_table" - - _drop_table(catalog, identifier) - schema = Schema( - NestedField(1, "city", StringType(), required=True), - NestedField(2, "inhabitants", IntegerType(), required=True), - # Mark City as the identifier field, also known as the primary-key - identifier_field_ids=[1], - ) - - tbl = catalog.create_table(identifier, schema=schema) - - arrow_schema = pa.schema( - [ - pa.field("city", pa.string(), nullable=False), - pa.field("inhabitants", pa.int32(), nullable=False), - ] - ) - - # Write some data - df = pa.Table.from_pylist( - [ - {"city": "Drachten", "inhabitants": 45019}, - {"city": "Drachten", "inhabitants": 45019}, - ], - schema=arrow_schema, - ) tbl.append(df) df = pa.Table.from_pylist( [ # Will be updated, the inhabitants has been updated {"city": "Drachten", "inhabitants": 45505}, + # New row, will be inserted + {"city": "Berlin", "inhabitants": 3432000}, + # Ignored, already exists in the table + {"city": "Paris", "inhabitants": 2103000}, ], schema=arrow_schema, ) + upd = tbl.upsert(df) - with pytest.raises(ValueError, match="Target table has duplicate rows, aborting upsert"): - _ = tbl.upsert(df) - - -def test_upsert_without_identifier_fields(catalog: Catalog) -> None: - identifier = "default.test_upsert_without_identifier_fields" - _drop_table(catalog, identifier) - - schema = Schema( - NestedField(1, "city", StringType(), required=True), - NestedField(2, "population", IntegerType(), required=True), - # No identifier field :o - identifier_field_ids=[], - ) - - tbl = catalog.create_table(identifier, schema=schema) - # Write some data - df = pa.Table.from_pylist( - [ - {"city": "Amsterdam", "population": 921402}, - {"city": "San Francisco", "population": 808988}, - {"city": "Drachten", "population": 45019}, - {"city": "Paris", "population": 2103000}, - ], - schema=schema_to_pyarrow(schema), - ) - - with pytest.raises( - ValueError, match="Join columns could not be found, please set identifier-field-ids or pass in explicitly." 
- ): - tbl.upsert(df) - - -def test_upsert_with_struct_field_as_non_join_key(catalog: Catalog) -> None: - identifier = "default.test_upsert_struct_field_fails" - _drop_table(catalog, identifier) - - schema = Schema( - NestedField(1, "id", IntegerType(), required=True), - NestedField( - 2, - "nested_type", - StructType( - NestedField(3, "sub1", StringType(), required=True), - NestedField(4, "sub2", StringType(), required=True), - ), - required=False, - ), - identifier_field_ids=[1], - ) - - tbl = catalog.create_table(identifier, schema=schema) - - arrow_schema = pa.schema( - [ - pa.field("id", pa.int32(), nullable=False), - pa.field( - "nested_type", - pa.struct( - [ - pa.field("sub1", pa.large_string(), nullable=False), - pa.field("sub2", pa.large_string(), nullable=False), - ] - ), - nullable=True, - ), - ] - ) - - initial_data = pa.Table.from_pylist( - [ - { - "id": 1, - "nested_type": {"sub1": "bla1", "sub2": "bla"}, - } - ], - schema=arrow_schema, - ) - tbl.append(initial_data) - - update_data = pa.Table.from_pylist( - [ - { - "id": 2, - "nested_type": {"sub1": "bla1", "sub2": "bla"}, - }, - { - "id": 1, - "nested_type": {"sub1": "bla1", "sub2": "bla2"}, - }, - ], - schema=arrow_schema, - ) - - res = tbl.upsert(update_data, join_cols=["id"]) - - expected_updated = 1 - expected_inserted = 1 - - assert_upsert_result(res, expected_updated, expected_inserted) - - update_data = pa.Table.from_pylist( - [ - { - "id": 2, - "nested_type": {"sub1": "bla1", "sub2": "bla"}, - }, - { - "id": 1, - "nested_type": {"sub1": "bla1", "sub2": "bla2"}, - }, - ], - schema=arrow_schema, - ) - - res = tbl.upsert(update_data, join_cols=["id"]) - - expected_updated = 0 - expected_inserted = 0 - - assert_upsert_result(res, expected_updated, expected_inserted) - - -def test_upsert_with_struct_field_as_join_key(catalog: Catalog) -> None: - identifier = "default.test_upsert_with_struct_field_as_join_key" - _drop_table(catalog, identifier) - - schema = Schema( - NestedField(1, "id", IntegerType(), required=True), - NestedField( - 2, - "nested_type", - StructType( - NestedField(3, "sub1", StringType(), required=True), - NestedField(4, "sub2", StringType(), required=True), - ), - required=False, - ), - identifier_field_ids=[1], - ) - - tbl = catalog.create_table(identifier, schema=schema) - - arrow_schema = pa.schema( - [ - pa.field("id", pa.int32(), nullable=False), - pa.field( - "nested_type", - pa.struct( - [ - pa.field("sub1", pa.large_string(), nullable=False), - pa.field("sub2", pa.large_string(), nullable=False), - ] - ), - nullable=True, - ), - ] - ) - - initial_data = pa.Table.from_pylist( - [ - { - "id": 1, - "nested_type": {"sub1": "bla1", "sub2": "bla"}, - } - ], - schema=arrow_schema, - ) - tbl.append(initial_data) - - update_data = pa.Table.from_pylist( - [ - { - "id": 2, - "nested_type": {"sub1": "bla1", "sub2": "bla"}, - }, - { - "id": 1, - "nested_type": {"sub1": "bla1", "sub2": "bla"}, - }, - ], - schema=arrow_schema, - ) - - with pytest.raises( - pa.lib.ArrowNotImplementedError, match="Keys of type struct" - ): - _ = tbl.upsert(update_data, join_cols=["nested_type"]) - - -def test_upsert_with_nulls(catalog: Catalog) -> None: - identifier = "default.test_upsert_with_nulls" - _drop_table(catalog, identifier) - - schema = pa.schema( - [ - ("foo", pa.string()), - ("bar", pa.int32()), - ("baz", pa.bool_()), - ] - ) - - # create table with null value - table = catalog.create_table(identifier, schema) - data_with_null = pa.Table.from_pylist( - [ - {"foo": "apple", "bar": None, "baz": False}, - {"foo": 
"banana", "bar": None, "baz": False}, - ], - schema=schema, - ) - table.append(data_with_null) - assert table.scan().to_arrow()["bar"].is_null() - - # upsert table with non-null value - data_without_null = pa.Table.from_pylist( - [ - {"foo": "apple", "bar": 7, "baz": False}, - ], - schema=schema, - ) - upd = table.upsert(data_without_null, join_cols=["foo"]) assert upd.rows_updated == 1 - assert upd.rows_inserted == 0 - assert table.scan().to_arrow() == pa.Table.from_pylist( - [ - {"foo": "apple", "bar": 7, "baz": False}, - {"foo": "banana", "bar": None, "baz": False}, - ], - schema=schema, - ) - - -def test_transaction(catalog: Catalog) -> None: - """Test the upsert within a Transaction. Make sure that if something fails the entire Transaction is - rolled back.""" - identifier = "default.test_merge_source_dups" - _drop_table(catalog, identifier) - - ctx = SessionContext() - - table = gen_target_iceberg_table(1, 10, False, ctx, catalog, identifier) - df_before_transaction = table.scan().to_arrow() - - source_df = gen_source_dataset(5, 15, False, True, ctx) - - with pytest.raises(Exception, match="Duplicate rows found in source dataset based on the key columns. No upsert executed"): - with table.transaction() as tx: - tx.delete(delete_filter=AlwaysTrue()) - tx.upsert(df=source_df, join_cols=["order_id"]) - - df = table.scan().to_arrow() - - assert df_before_transaction == df - - -def test_transaction_multiple_upserts(catalog: Catalog) -> None: - identifier = "default.test_multi_upsert" - _drop_table(catalog, identifier) - - schema = Schema( - NestedField(1, "id", IntegerType(), required=True), - NestedField(2, "name", StringType(), required=True), - identifier_field_ids=[1], - ) - - tbl = catalog.create_table(identifier, schema=schema) - - # Define exact schema: required int32 and required string - arrow_schema = pa.schema( - [ - pa.field("id", pa.int32(), nullable=False), - pa.field("name", pa.string(), nullable=False), - ] - ) - - tbl.append(pa.Table.from_pylist([{"id": 1, "name": "Alice"}], schema=arrow_schema)) - - df = pa.Table.from_pylist([{"id": 2, "name": "Bob"}, {"id": 1, "name": "Alicia"}], schema=arrow_schema) - - with tbl.transaction() as txn: - txn.delete(delete_filter="id = 1") - txn.append(df) - - # This should read the uncommitted changes - txn.upsert(df, join_cols=["id"]) - - result = tbl.scan().to_arrow().to_pylist() - assert sorted(result, key=lambda x: x["id"]) == [ - {"id": 1, "name": "Alicia"}, - {"id": 2, "name": "Bob"}, - ] + assert upd.rows_inserted == 1 diff --git a/tests/table/test_validate.py b/tests/table/test_validate.py deleted file mode 100644 index 570f680860..0000000000 --- a/tests/table/test_validate.py +++ /dev/null @@ -1,352 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# pylint:disable=redefined-outer-name,eval-used -from typing import cast -from unittest.mock import patch - -import pytest - -from pyiceberg.exceptions import ValidationException -from pyiceberg.io import FileIO -from pyiceberg.manifest import ManifestContent, ManifestEntry, ManifestEntryStatus, ManifestFile -from pyiceberg.table import Table -from pyiceberg.table.snapshots import Operation, Snapshot, Summary -from pyiceberg.table.update.validate import ( - _added_data_files, - _deleted_data_files, - _validate_added_data_files, - _validate_deleted_data_files, - _validation_history, -) - - -@pytest.fixture -def table_v2_with_extensive_snapshots_and_manifests( - table_v2_with_extensive_snapshots: Table, -) -> tuple[Table, dict[int, list[ManifestFile]]]: - """Fixture to create a table with extensive snapshots and manifests.""" - mock_manifests = {} - - for i, snapshot in enumerate(table_v2_with_extensive_snapshots.snapshots()): - mock_manifest = ManifestFile.from_args( - manifest_path=f"foo/bar/{i}", - manifest_length=1, - partition_spec_id=1, - content=ManifestContent.DATA if i % 2 == 0 else ManifestContent.DELETES, - sequence_number=1, - min_sequence_number=1, - added_snapshot_id=snapshot.snapshot_id, - ) - - # Store the manifest for this specific snapshot - mock_manifests[snapshot.snapshot_id] = [mock_manifest] - - return table_v2_with_extensive_snapshots, mock_manifests - - -def test_validation_history(table_v2_with_extensive_snapshots_and_manifests: tuple[Table, dict[int, list[ManifestFile]]]) -> None: - """Test the validation history function.""" - table, mock_manifests = table_v2_with_extensive_snapshots_and_manifests - - expected_manifest_data_counts = len([m for m in mock_manifests.values() if m[0].content == ManifestContent.DATA]) - - oldest_snapshot = table.snapshots()[0] - newest_snapshot = cast(Snapshot, table.current_snapshot()) - - def mock_read_manifest_side_effect(self: Snapshot, io: FileIO) -> list[ManifestFile]: - """Mock the manifests method to use the snapshot_id for lookup.""" - snapshot_id = self.snapshot_id - if snapshot_id in mock_manifests: - return mock_manifests[snapshot_id] - return [] - - with patch("pyiceberg.table.snapshots.Snapshot.manifests", new=mock_read_manifest_side_effect): - manifests, snapshots = _validation_history( - table, - oldest_snapshot, - newest_snapshot, - {Operation.APPEND}, - ManifestContent.DATA, - ) - - assert len(manifests) == expected_manifest_data_counts - - -def test_validation_history_fails_on_snapshot_with_no_summary( - table_v2_with_extensive_snapshots_and_manifests: tuple[Table, dict[int, list[ManifestFile]]], -) -> None: - """Test the validation history function fails on snapshot with no summary.""" - table, _ = table_v2_with_extensive_snapshots_and_manifests - oldest_snapshot = table.snapshots()[0] - newest_snapshot = cast(Snapshot, table.current_snapshot()) - - # Create a snapshot with no summary - snapshot_with_no_summary = Snapshot( - snapshot_id="1234", - parent_id="5678", - timestamp_ms=0, - operation=Operation.APPEND, - summary=None, - manifest_list="foo/bar", - ) - with patch("pyiceberg.table.update.validate.ancestors_between", return_value=[snapshot_with_no_summary]): - with pytest.raises(ValidationException): - _validation_history( - table, - oldest_snapshot, - newest_snapshot, - {Operation.APPEND}, - ManifestContent.DATA, - ) - - -def test_validation_history_fails_on_from_snapshot_not_matching_last_snapshot( - table_v2_with_extensive_snapshots_and_manifests: tuple[Table, dict[int, list[ManifestFile]]], -) -> None: - 
"""Test the validation history function fails when from_snapshot doesn't match last_snapshot.""" - table, mock_manifests = table_v2_with_extensive_snapshots_and_manifests - - oldest_snapshot = table.snapshots()[0] - newest_snapshot = cast(Snapshot, table.current_snapshot()) - - def mock_read_manifest_side_effect(self: Snapshot, io: FileIO) -> list[ManifestFile]: - """Mock the manifests method to use the snapshot_id for lookup.""" - snapshot_id = self.snapshot_id - if snapshot_id in mock_manifests: - return mock_manifests[snapshot_id] - return [] - - missing_oldest_snapshot = table.snapshots()[1:] - - with patch("pyiceberg.table.snapshots.Snapshot.manifests", new=mock_read_manifest_side_effect): - with patch("pyiceberg.table.update.validate.ancestors_between", return_value=missing_oldest_snapshot): - with pytest.raises(ValidationException): - _validation_history( - table, - oldest_snapshot, - newest_snapshot, - {Operation.APPEND}, - ManifestContent.DATA, - ) - - -def test_deleted_data_files( - table_v2_with_extensive_snapshots_and_manifests: tuple[Table, dict[int, list[ManifestFile]]], -) -> None: - table, mock_manifests = table_v2_with_extensive_snapshots_and_manifests - - oldest_snapshot = table.snapshots()[0] - newest_snapshot = cast(Snapshot, table.current_snapshot()) - - def mock_read_manifest_side_effect(self: Snapshot, io: FileIO) -> list[ManifestFile]: - """Mock the manifests method to use the snapshot_id for lookup.""" - snapshot_id = self.snapshot_id - if snapshot_id in mock_manifests: - return mock_manifests[snapshot_id] - return [] - - # every snapshot is an append, so we should get nothing! - with patch("pyiceberg.table.snapshots.Snapshot.manifests", new=mock_read_manifest_side_effect): - result = list( - _deleted_data_files( - table=table, - starting_snapshot=newest_snapshot, - data_filter=None, - parent_snapshot=oldest_snapshot, - partition_set=None, - ) - ) - - assert result == [] - - # modify second to last snapshot to be a delete - snapshots = table.snapshots() - altered_snapshot = snapshots[-2] - altered_snapshot = altered_snapshot.model_copy(update={"summary": Summary(operation=Operation.DELETE)}) - snapshots[-2] = altered_snapshot - - table.metadata = table.metadata.model_copy( - update={"snapshots": snapshots}, - ) - - my_entry = ManifestEntry.from_args( - status=ManifestEntryStatus.DELETED, - snapshot_id=altered_snapshot.snapshot_id, - ) - - with ( - patch("pyiceberg.table.snapshots.Snapshot.manifests", new=mock_read_manifest_side_effect), - patch("pyiceberg.manifest.ManifestFile.fetch_manifest_entry", return_value=[my_entry]), - ): - result = list( - _deleted_data_files( - table=table, - starting_snapshot=newest_snapshot, - data_filter=None, - parent_snapshot=oldest_snapshot, - partition_set=None, - ) - ) - - assert result == [my_entry] - - -def test_validate_deleted_data_files_raises_on_conflict( - table_v2_with_extensive_snapshots_and_manifests: tuple[Table, dict[int, list[ManifestFile]]], -) -> None: - table, _ = table_v2_with_extensive_snapshots_and_manifests - oldest_snapshot = table.snapshots()[0] - newest_snapshot = cast(Snapshot, table.current_snapshot()) - - class DummyEntry: - snapshot_id = 123 - - with patch("pyiceberg.table.update.validate._deleted_data_files", return_value=[DummyEntry()]): - with pytest.raises(ValidationException): - _validate_deleted_data_files( - table=table, - starting_snapshot=newest_snapshot, - data_filter=None, - parent_snapshot=oldest_snapshot, - ) - - -@pytest.mark.parametrize("operation", [Operation.APPEND, Operation.OVERWRITE]) 
-def test_validate_added_data_files_conflicting_count( - table_v2_with_extensive_snapshots_and_manifests: tuple[Table, dict[int, list[ManifestFile]]], - operation: Operation, -) -> None: - table, mock_manifests = table_v2_with_extensive_snapshots_and_manifests - - snapshot_history = 100 - snapshots = table.snapshots() - for i in range(1, snapshot_history + 1): - altered_snapshot = snapshots[-i] - altered_snapshot = altered_snapshot.model_copy(update={"summary": Summary(operation=operation)}) - snapshots[-i] = altered_snapshot - - table.metadata = table.metadata.model_copy( - update={"snapshots": snapshots}, - ) - - oldest_snapshot = table.snapshots()[-snapshot_history] - newest_snapshot = cast(Snapshot, table.current_snapshot()) - - def mock_read_manifest_side_effect(self: Snapshot, io: FileIO) -> list[ManifestFile]: - """Mock the manifests method to use the snapshot_id for lookup.""" - snapshot_id = self.snapshot_id - if snapshot_id in mock_manifests: - return mock_manifests[snapshot_id] - return [] - - def mock_fetch_manifest_entry(self: ManifestFile, io: FileIO, discard_deleted: bool = True) -> list[ManifestEntry]: - return [ - ManifestEntry.from_args( - status=ManifestEntryStatus.ADDED, - snapshot_id=self.added_snapshot_id, - ) - ] - - with ( - patch("pyiceberg.table.snapshots.Snapshot.manifests", new=mock_read_manifest_side_effect), - patch("pyiceberg.manifest.ManifestFile.fetch_manifest_entry", new=mock_fetch_manifest_entry), - ): - result = list( - _added_data_files( - table=table, - starting_snapshot=newest_snapshot, - data_filter=None, - parent_snapshot=oldest_snapshot, - partition_set=None, - ) - ) - - # since we only look at the ManifestContent.Data files - assert len(result) == snapshot_history / 2 - - -@pytest.mark.parametrize("operation", [Operation.DELETE, Operation.REPLACE]) -def test_validate_added_data_files_non_conflicting_count( - table_v2_with_extensive_snapshots_and_manifests: tuple[Table, dict[int, list[ManifestFile]]], - operation: Operation, -) -> None: - table, mock_manifests = table_v2_with_extensive_snapshots_and_manifests - - snapshot_history = 100 - snapshots = table.snapshots() - for i in range(1, snapshot_history + 1): - altered_snapshot = snapshots[-i] - altered_snapshot = altered_snapshot.model_copy(update={"summary": Summary(operation=operation)}) - snapshots[-i] = altered_snapshot - - table.metadata = table.metadata.model_copy( - update={"snapshots": snapshots}, - ) - - oldest_snapshot = table.snapshots()[-snapshot_history] - newest_snapshot = cast(Snapshot, table.current_snapshot()) - - def mock_read_manifest_side_effect(self: Snapshot, io: FileIO) -> list[ManifestFile]: - """Mock the manifests method to use the snapshot_id for lookup.""" - snapshot_id = self.snapshot_id - if snapshot_id in mock_manifests: - return mock_manifests[snapshot_id] - return [] - - def mock_fetch_manifest_entry(self: ManifestFile, io: FileIO, discard_deleted: bool = True) -> list[ManifestEntry]: - return [ - ManifestEntry.from_args( - status=ManifestEntryStatus.ADDED, - snapshot_id=self.added_snapshot_id, - ) - ] - - with ( - patch("pyiceberg.table.snapshots.Snapshot.manifests", new=mock_read_manifest_side_effect), - patch("pyiceberg.manifest.ManifestFile.fetch_manifest_entry", new=mock_fetch_manifest_entry), - ): - result = list( - _added_data_files( - table=table, - starting_snapshot=newest_snapshot, - data_filter=None, - parent_snapshot=oldest_snapshot, - partition_set=None, - ) - ) - - assert len(result) == 0 - - -def test_validate_added_data_files_raises_on_conflict( - 
table_v2_with_extensive_snapshots_and_manifests: tuple[Table, dict[int, list[ManifestFile]]], -) -> None: - table, _ = table_v2_with_extensive_snapshots_and_manifests - oldest_snapshot = table.snapshots()[0] - newest_snapshot = cast(Snapshot, table.current_snapshot()) - - class DummyEntry: - snapshot_id = 123 - - with patch("pyiceberg.table.update.validate._added_data_files", return_value=[DummyEntry()]): - with pytest.raises(ValidationException): - _validate_added_data_files( - table=table, - starting_snapshot=newest_snapshot, - data_filter=None, - parent_snapshot=oldest_snapshot, - ) diff --git a/tests/test_avro_sanitization.py b/tests/test_avro_sanitization.py deleted file mode 100644 index 0ca23e3165..0000000000 --- a/tests/test_avro_sanitization.py +++ /dev/null @@ -1,269 +0,0 @@ -# type: ignore -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - - -import tempfile -from typing import Any, Dict - -from fastavro import reader - -import pyiceberg.avro.file as avro -from pyiceberg.io.pyarrow import PyArrowFileIO -from pyiceberg.schema import ICEBERG_FIELD_NAME_PROP, Schema -from pyiceberg.typedef import Record -from pyiceberg.types import IntegerType, NestedField, StringType -from pyiceberg.utils.schema_conversion import AvroSchemaConversion, AvroType - - -class AvroTestRecord(Record): - """Test record class for Avro compatibility testing.""" - - @property - def valid_field(self) -> str: - return self._data[0] - - @property - def invalid_field(self) -> int: - return self._data[1] - - @property - def field_starting_with_digit(self) -> str: - return self._data[2] - - -def test_comprehensive_field_name_sanitization() -> None: - """Test comprehensive field name sanitization including edge cases and Java compatibility.""" - - test_cases = [ - # Java compatibility test cases - ("9x", "_9x"), - ("x_", "x_"), - ("a.b", "a_x2Eb"), - ("☃", "_x2603"), - ("a#b", "a_x23b"), - ("123", "_123"), - ("_", "_"), - ("a", "a"), - ("a1", "a1"), - ("1a", "_1a"), - ("a☃b", "a_x2603b"), - ("name#with#hash", "name_x23with_x23hash"), - ("123number", "_123number"), - ("😎", "_x1F60E"), - ("😎_with_text", "_x1F60E_with_text"), - ] - - for original_name, expected_sanitized in test_cases: - schema = Schema(NestedField(field_id=1, name=original_name, field_type=StringType(), required=True)) - - avro_schema: AvroType = AvroSchemaConversion().iceberg_to_avro(schema) - avro_dict: Dict[str, Any] = avro_schema - - assert avro_dict["fields"][0]["name"] == expected_sanitized - - if original_name != expected_sanitized: - assert avro_dict["fields"][0][ICEBERG_FIELD_NAME_PROP] == original_name - else: - assert ICEBERG_FIELD_NAME_PROP not in avro_dict["fields"][0] - - -def test_comprehensive_avro_compatibility() -> None: - """Test comprehensive Avro compatibility including complex schemas and file 
structure.""" - - # Create schema with various field name types - schema = Schema( - NestedField(field_id=1, name="valid_field", field_type=StringType(), required=True), - NestedField(field_id=2, name="invalid.field", field_type=IntegerType(), required=True), - NestedField(field_id=3, name="9x", field_type=StringType(), required=True), - NestedField(field_id=4, name="name#with#hash", field_type=StringType(), required=True), - NestedField(field_id=5, name="☃", field_type=IntegerType(), required=True), - NestedField(field_id=6, name="😎", field_type=IntegerType(), required=True), - ) - - test_records = [ - AvroTestRecord("hello", 42, "test", "hash_value", 100, 200), - AvroTestRecord("goodbye", 99, "example", "another_hash", 200, 300), - ] - - with tempfile.NamedTemporaryFile(suffix=".avro", delete=False) as tmp_file: - tmp_avro_file = tmp_file.name - - try: - with avro.AvroOutputFile[AvroTestRecord]( - output_file=PyArrowFileIO().new_output(tmp_avro_file), - file_schema=schema, - schema_name="test_schema", - metadata={"test": "metadata"}, - ) as output_file: - output_file.write_block(test_records) - - with open(tmp_avro_file, "rb") as fo: - # Test Avro file structure - magic = fo.read(4) - assert magic == b"Obj\x01" # Avro magic bytes - - import struct - - metadata_length = struct.unpack(">I", fo.read(4))[0] - assert metadata_length > 0 - - fo.seek(0) - avro_reader = reader(fo) - - avro_schema: AvroType = avro_reader.writer_schema - avro_dict: Dict[str, Any] = avro_schema - field_names = [field["name"] for field in avro_dict["fields"]] - - # Expected sanitized names (matching Java implementation) - expected_field_names = [ - "valid_field", - "invalid_x2Efield", - "_9x", - "name_x23with_x23hash", - "_x2603", - "_x1F60E", - ] - - assert field_names == expected_field_names - - # Verify iceberg-field-name properties - for field in avro_dict["fields"]: - field_dict: Dict[str, Any] = field - if field_dict["name"] == "invalid_x2Efield": - assert "iceberg-field-name" in field_dict - assert field_dict["iceberg-field-name"] == "invalid.field" - elif field_dict["name"] == "_9x": - assert "iceberg-field-name" in field_dict - assert field_dict["iceberg-field-name"] == "9x" - elif field_dict["name"] == "name_x23with_x23hash": - assert "iceberg-field-name" in field_dict - assert field_dict["iceberg-field-name"] == "name#with#hash" - elif field_dict["name"] == "_x2603": - assert "iceberg-field-name" in field_dict - assert field_dict["iceberg-field-name"] == "☃" - elif field_dict["name"] == "_x1F60E": - assert "iceberg-field-name" in field_dict - assert field_dict["iceberg-field-name"] == "😎" - else: - assert "iceberg-field-name" not in field_dict - - records = list(avro_reader) - assert len(records) == 2 - - # Verify data integrity - first_record = records[0] - assert first_record["valid_field"] == "hello" - assert first_record["invalid_x2Efield"] == 42 - assert first_record["_9x"] == "test" - assert first_record["name_x23with_x23hash"] == "hash_value" - assert first_record["_x2603"] == 100 - assert first_record["_x1F60E"] == 200 - - second_record = records[1] - assert second_record["valid_field"] == "goodbye" - assert second_record["invalid_x2Efield"] == 99 - assert second_record["_9x"] == "example" - assert second_record["name_x23with_x23hash"] == "another_hash" - assert second_record["_x2603"] == 200 - assert second_record["_x1F60E"] == 300 - - assert avro_reader.metadata.get("test") == "metadata" - - finally: - import os - - if os.path.exists(tmp_avro_file): - os.unlink(tmp_avro_file) - - -def 
test_emoji_field_name_sanitization() -> None: - """Test that emoji field names are properly sanitized according to Java implementation.""" - - schema = Schema( - NestedField(field_id=1, name="😎", field_type=IntegerType(), required=True), - NestedField(field_id=2, name="valid_field", field_type=StringType(), required=True), - NestedField(field_id=3, name="😎_with_text", field_type=StringType(), required=True), - ) - - avro_schema: AvroType = AvroSchemaConversion().iceberg_to_avro(schema, schema_name="emoji_test") - avro_dict: Dict[str, Any] = avro_schema - - field_names = [field["name"] for field in avro_dict["fields"]] - expected_field_names = [ - "_x1F60E", # 😎 becomes _x1F60E (Unicode 0x1F60E) - "valid_field", - "_x1F60E_with_text", - ] - - assert field_names == expected_field_names - - for field in avro_dict["fields"]: - field_dict: Dict[str, Any] = field - if field_dict["name"] == "_x1F60E": - assert field_dict["iceberg-field-name"] == "😎" - elif field_dict["name"] == "_x1F60E_with_text": - assert field_dict["iceberg-field-name"] == "😎_with_text" - else: - assert "iceberg-field-name" not in field_dict - - test_records = [ - AvroTestRecord(42, "hello", "world"), - ] - - with tempfile.NamedTemporaryFile(suffix=".avro", delete=False) as tmp_file: - tmp_avro_file = tmp_file.name - - try: - with avro.AvroOutputFile[AvroTestRecord]( - output_file=PyArrowFileIO().new_output(tmp_avro_file), - file_schema=schema, - schema_name="emoji_test", - ) as output_file: - output_file.write_block(test_records) - - with open(tmp_avro_file, "rb") as fo: - avro_reader = reader(fo) - - avro_schema_reader: AvroType = avro_reader.writer_schema - avro_dict_reader: Dict[str, Any] = avro_schema_reader - field_names_reader = [field["name"] for field in avro_dict_reader["fields"]] - - assert field_names_reader == expected_field_names - - for field in avro_dict_reader["fields"]: - field_dict_reader: Dict[str, Any] = field - if field_dict_reader["name"] == "_x1F60E": - assert field_dict_reader["iceberg-field-name"] == "😎" - elif field_dict_reader["name"] == "_x1F60E_with_text": - assert field_dict_reader["iceberg-field-name"] == "😎_with_text" - else: - assert "iceberg-field-name" not in field_dict_reader - - records = list(avro_reader) - assert len(records) == 1 - - first_record = records[0] - assert first_record["_x1F60E"] == 42 - assert first_record["valid_field"] == "hello" - assert first_record["_x1F60E_with_text"] == "world" - - finally: - import os - - if os.path.exists(tmp_avro_file): - os.unlink(tmp_avro_file) diff --git a/tests/test_conversions.py b/tests/test_conversions.py index 2ee0ba3dd9..f57998aa4e 100644 --- a/tests/test_conversions.py +++ b/tests/test_conversions.py @@ -43,10 +43,6 @@ - Stored as microseconds from 1970-01-01 00:00:00.000000 in an 8-byte little-endian long - 400000L is 0...110|00011010|10000000 in binary - 10000000 -> 128 (-128), 00011010 -> 26, 00000110 -> 6, ... , 00000000 -> 0 - TimestampNano: - - Stored as nanoseconds from 1970-01-01 00:00:00.000000000 in an 8-byte little-endian long - - 400000000L is 00010111|11010111|10000100|00000000 in binary - - 00000000 -> 0, 10000100 -> 124 (-124), 11010111 -> 41 (-41), 00010111 -> 23, ... 
, 00000000 -> 0 String: - Stored as UTF-8 bytes (without length) - 'A' -> 65, 'B' -> 66, 'C' -> 67 @@ -103,9 +99,7 @@ LongType, PrimitiveType, StringType, - TimestampNanoType, TimestampType, - TimestamptzNanoType, TimestamptzType, TimeType, UUIDType, @@ -272,8 +266,6 @@ def test_partition_to_py_raise_on_incorrect_precision_or_scale( (TimestamptzType(), b"\x00\xe8vH\x17\x00\x00\x00", 100000000000), (TimestampType(), b"\x80\x1a\x06\x00\x00\x00\x00\x00", 400000), (TimestampType(), b"\x00\xe8vH\x17\x00\x00\x00", 100000000000), - (TimestampNanoType(), b"\00\x84\xd7\x17\x00\x00\x00\x00", 400000000), - (TimestamptzNanoType(), b"\00\x84\xd7\x17\x00\x00\x00\x00", 400000000), (StringType(), b"ABC", "ABC"), (StringType(), b"foo", "foo"), ( @@ -553,52 +545,3 @@ def test_datetime_obj_to_bytes(primitive_type: PrimitiveType, value: Union[datet bytes_from_value = conversions.to_bytes(primitive_type, value) assert bytes_from_value == expected_bytes - - -@pytest.mark.parametrize( - "primitive_type, value, expected", - [ - (BooleanType(), True, True), - (IntegerType(), 34, 34), - (LongType(), 34, 34), - (FloatType(), 1.0, 1.0), - (DoubleType(), 1.0, 1.0), - (DecimalType(9, 4), Decimal("123.4500"), "123.4500"), - (DecimalType(9, 0), Decimal("2"), "2"), - (DecimalType(9, -20), Decimal("2E+20"), "2E+20"), - (DateType(), date(2017, 11, 16), "2017-11-16"), - (TimeType(), time(22, 31, 8, 123456), "22:31:08.123456"), - (TimestampType(), datetime(2017, 11, 16, 22, 31, 8, 123456), "2017-11-16T22:31:08.123456"), - (TimestamptzType(), datetime(2017, 11, 16, 22, 31, 8, 123456, tzinfo=timezone.utc), "2017-11-16T22:31:08.123456+00:00"), - (StringType(), "iceberg", "iceberg"), - (BinaryType(), b"\x01\x02\x03\xff", "010203ff"), - (FixedType(4), b"\x01\x02\x03\xff", "010203ff"), - ], -) -def test_json_single_serialization(primitive_type: PrimitiveType, value: Any, expected: Any) -> None: - json_val = conversions.to_json(primitive_type, value) - assert json_val == expected - - -@pytest.mark.parametrize( - "primitive_type, value", - [ - (BooleanType(), True), - (IntegerType(), 34), - (LongType(), 34), - (FloatType(), 1.0), - (DoubleType(), 1.0), - (DecimalType(9, 4), Decimal("123.4500")), - (DecimalType(9, 0), Decimal("2")), - (DecimalType(9, -20), Decimal("2E+20")), - (DateType(), date(2017, 11, 16)), - (TimeType(), time(22, 31, 8, 123456)), - (TimestampType(), datetime(2017, 11, 16, 22, 31, 8, 123456)), - (TimestamptzType(), datetime(2017, 11, 16, 22, 31, 8, 123456, tzinfo=timezone.utc)), - (StringType(), "iceberg"), - (BinaryType(), b"\x01\x02\x03\xff"), - (FixedType(4), b"\x01\x02\x03\xff"), - ], -) -def test_json_serialize_roundtrip(primitive_type: PrimitiveType, value: Any) -> None: - assert value == conversions.from_json(primitive_type, conversions.to_json(primitive_type, value)) diff --git a/tests/test_schema.py b/tests/test_schema.py index 3ca74c4027..daa46dee1f 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -21,13 +21,12 @@ import pyarrow as pa import pytest +from pyiceberg import schema from pyiceberg.exceptions import ResolveError, ValidationError from pyiceberg.schema import ( Accessor, Schema, build_position_accessors, - index_by_id, - index_by_name, promote, prune_columns, sanitize_column_names, @@ -91,7 +90,7 @@ def test_schema_str(table_schema_simple: Schema) -> None: def test_schema_repr_single_field() -> None: """Test schema representation""" - actual = repr(Schema(NestedField(field_id=1, name="foo", field_type=StringType()), schema_id=1)) + actual = 
repr(schema.Schema(NestedField(field_id=1, name="foo", field_type=StringType()), schema_id=1)) expected = "Schema(NestedField(field_id=1, name='foo', field_type=StringType(), required=False), schema_id=1, identifier_field_ids=[])" assert expected == actual @@ -99,7 +98,7 @@ def test_schema_repr_single_field() -> None: def test_schema_repr_two_fields() -> None: """Test schema representation""" actual = repr( - Schema( + schema.Schema( NestedField(field_id=1, name="foo", field_type=StringType()), NestedField(field_id=2, name="bar", field_type=IntegerType(), required=False), schema_id=1, @@ -112,7 +111,7 @@ def test_schema_repr_two_fields() -> None: def test_schema_raise_on_duplicate_names() -> None: """Test schema representation""" with pytest.raises(ValueError) as exc_info: - Schema( + schema.Schema( NestedField(field_id=1, name="foo", field_type=StringType(), required=False), NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), @@ -126,7 +125,7 @@ def test_schema_raise_on_duplicate_names() -> None: def test_schema_index_by_id_visitor(table_schema_nested: Schema) -> None: """Test index_by_id visitor function""" - index = index_by_id(table_schema_nested) + index = schema.index_by_id(table_schema_nested) assert index == { 1: NestedField(field_id=1, name="foo", field_type=StringType(), required=False), 2: NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), @@ -199,7 +198,7 @@ def test_schema_index_by_id_visitor(table_schema_nested: Schema) -> None: def test_schema_index_by_name_visitor(table_schema_nested: Schema) -> None: """Test index_by_name visitor function""" - table_schema_nested = Schema( + table_schema_nested = schema.Schema( NestedField(field_id=1, name="foo", field_type=StringType(), required=False), NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), @@ -246,7 +245,7 @@ def test_schema_index_by_name_visitor(table_schema_nested: Schema) -> None: schema_id=1, identifier_field_ids=[2], ) - index = index_by_name(table_schema_nested) + index = schema.index_by_name(table_schema_nested) assert index == { "foo": 1, "bar": 2, @@ -302,7 +301,7 @@ def test_schema_find_column_name_by_id(table_schema_simple: Schema) -> None: def test_schema_find_field_by_id(table_schema_simple: Schema) -> None: """Test finding a column using its field ID""" - index = index_by_id(table_schema_simple) + index = schema.index_by_id(table_schema_simple) column1 = index[1] assert isinstance(column1, NestedField) @@ -325,7 +324,7 @@ def test_schema_find_field_by_id(table_schema_simple: Schema) -> None: def test_schema_find_field_by_id_raise_on_unknown_field(table_schema_simple: Schema) -> None: """Test raising when the field ID is not found among columns""" - index = index_by_id(table_schema_simple) + index = schema.index_by_id(table_schema_simple) with pytest.raises(Exception) as exc_info: _ = index[4] assert str(exc_info.value) == "4" @@ -333,7 +332,7 @@ def test_schema_find_field_by_id_raise_on_unknown_field(table_schema_simple: Sch def test_schema_find_field_type_by_id(table_schema_simple: Schema) -> None: """Test retrieving a columns' type using its field ID""" - index = index_by_id(table_schema_simple) + index = schema.index_by_id(table_schema_simple) assert index[1] == NestedField(field_id=1, name="foo", field_type=StringType(), required=False) assert index[2] == 
NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True) assert index[3] == NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False) @@ -342,7 +341,7 @@ def test_schema_find_field_type_by_id(table_schema_simple: Schema) -> None: def test_index_by_id_schema_visitor_raise_on_unregistered_type() -> None: """Test raising a NotImplementedError when an invalid type is provided to the index_by_id function""" with pytest.raises(NotImplementedError) as exc_info: - index_by_id("foo") # type: ignore + schema.index_by_id("foo") # type: ignore assert "Cannot visit non-type: foo" in str(exc_info.value) @@ -399,7 +398,6 @@ def test_build_position_accessors(table_schema_nested: Schema) -> None: 4: Accessor(position=3, inner=None), 6: Accessor(position=4, inner=None), 11: Accessor(position=5, inner=None), - 15: Accessor(position=6, inner=None), 16: Accessor(position=6, inner=Accessor(position=0, inner=None)), 17: Accessor(position=6, inner=Accessor(position=1, inner=None)), } @@ -927,7 +925,7 @@ def primitive_fields() -> List[NestedField]: ] -def test_add_top_level_primitives(primitive_fields: List[NestedField]) -> None: +def test_add_top_level_primitives(primitive_fields: NestedField) -> None: for primitive_field in primitive_fields: new_schema = Schema(primitive_field) applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore diff --git a/tests/test_transforms.py b/tests/test_transforms.py index 7a7d4a6d8e..51e8e23953 100644 --- a/tests/test_transforms.py +++ b/tests/test_transforms.py @@ -30,9 +30,7 @@ RootModel, WithJsonSchema, ) -from pytest_mock import MockFixture -from pyiceberg.exceptions import NotInstalledError from pyiceberg.expressions import ( AlwaysFalse, BooleanExpression, @@ -105,12 +103,9 @@ NestedField, PrimitiveType, StringType, - TimestampNanoType, TimestampType, - TimestamptzNanoType, TimestamptzType, TimeType, - UnknownType, UUIDType, ) from pyiceberg.utils.datetime import ( @@ -118,9 +113,7 @@ date_to_days, time_str_to_micros, timestamp_to_micros, - timestamp_to_nanos, timestamptz_to_micros, - timestamptz_to_nanos, ) @@ -148,26 +141,6 @@ ("iceberg", StringType(), 1210000089), (UUID("f79c3e09-677c-4bbd-a479-3f349cb785e7"), UUIDType(), 1488055340), (b"\xf7\x9c>\tg|K\xbd\xa4y?4\x9c\xb7\x85\xe7", UUIDType(), 1488055340), - ( - timestamp_to_nanos("2017-11-16T22:31:08.000001"), - TimestampNanoType(), - -1207196810, - ), - ( - timestamp_to_nanos("2017-11-16T22:31:08.000001001"), - TimestampNanoType(), - -1207196810, - ), - ( - timestamptz_to_nanos("2017-11-16T14:31:08.000001-08:00"), - TimestamptzNanoType(), - -1207196810, - ), - ( - timestamptz_to_nanos("2017-11-16T14:31:08.000001001-08:00"), - TimestamptzNanoType(), - -1207196810, - ), ], ) def test_bucket_hash_values(test_input: Any, test_type: PrimitiveType, expected: Any) -> None: @@ -229,31 +202,6 @@ def test_bucket_method(type_var: PrimitiveType) -> None: assert bucket_transform.to_human_string(type_var, "test") == "test" -@pytest.mark.parametrize( - "test_transform", - [ - BucketTransform(8), - TruncateTransform(10), - YearTransform(), - MonthTransform(), - DayTransform(), - HourTransform(), - UnknownTransform("unknown"), - ], -) -def test_transforms_unknown_type(test_transform: Transform[Any, Any]) -> None: - assert not test_transform.can_transform(UnknownType()) - with pytest.raises((ValueError, AttributeError)): - test_transform.transform(UnknownType()) - - -def test_identity_transform_unknown_type() -> None: - assert 
IdentityTransform().can_transform(UnknownType()) - assert IdentityTransform().result_type(UnknownType()) == UnknownType() - assert IdentityTransform().transform(UnknownType())(None) is None - assert IdentityTransform().to_human_string(UnknownType(), None) == "null" - - def test_string_with_surrogate_pair() -> None: string_with_surrogate_pair = "string with a surrogate pair: 💰" as_bytes = bytes(string_with_surrogate_pair, UTF8) @@ -1333,9 +1281,6 @@ def test_negative_year_strict_upper_bound(bound_reference_date: BoundReference[i def test_strict_bucket_integer(bound_reference_int: BoundReference[int]) -> None: value = literal(100).to(IntegerType()) transform = BucketTransform(num_buckets=10) - - _assert_projection_strict(BoundIsNull(term=bound_reference_int), transform, AlwaysFalse) - _assert_projection_strict(BoundNotEqualTo(term=bound_reference_int, literal=value), transform, NotEqualTo, "6") for expr in [BoundEqualTo, BoundLessThan, BoundLessThanOrEqual, BoundGreaterThan, BoundGreaterThanOrEqual]: @@ -1349,9 +1294,6 @@ def test_strict_bucket_integer(bound_reference_int: BoundReference[int]) -> None def test_strict_bucket_long(bound_reference_long: BoundReference[int]) -> None: value = literal(100).to(LongType()) transform = BucketTransform(num_buckets=10) - - _assert_projection_strict(BoundIsNull(term=bound_reference_long), transform, AlwaysFalse) - _assert_projection_strict(BoundNotEqualTo(term=bound_reference_long, literal=value), transform, NotEqualTo, "6") for expr in [BoundEqualTo, BoundLessThan, BoundLessThanOrEqual, BoundGreaterThan, BoundGreaterThanOrEqual]: @@ -1366,9 +1308,6 @@ def test_strict_bucket_decimal(bound_reference_decimal: BoundReference[int]) -> dec = DecimalType(9, 2) value = literal("100.00").to(dec) transform = BucketTransform(num_buckets=10) - - _assert_projection_strict(BoundIsNull(term=bound_reference_decimal), transform, AlwaysFalse) - _assert_projection_strict(BoundNotEqualTo(term=bound_reference_decimal, literal=value), transform, NotEqualTo, "2") for expr in [BoundEqualTo, BoundLessThan, BoundLessThanOrEqual, BoundGreaterThan, BoundGreaterThanOrEqual]: @@ -1382,9 +1321,6 @@ def test_strict_bucket_decimal(bound_reference_decimal: BoundReference[int]) -> def test_strict_bucket_string(bound_reference_str: BoundReference[int]) -> None: value = literal("abcdefg").to(StringType()) transform = BucketTransform(num_buckets=10) - - _assert_projection_strict(BoundIsNull(term=bound_reference_str), transform, AlwaysFalse) - _assert_projection_strict(BoundNotEqualTo(term=bound_reference_str, literal=value), transform, NotEqualTo, "4") for expr in [BoundEqualTo, BoundLessThan, BoundLessThanOrEqual, BoundGreaterThan, BoundGreaterThanOrEqual]: @@ -1398,9 +1334,6 @@ def test_strict_bucket_string(bound_reference_str: BoundReference[int]) -> None: def test_strict_bucket_bytes(bound_reference_binary: BoundReference[int]) -> None: value = literal(str.encode("abcdefg")).to(BinaryType()) transform = BucketTransform(num_buckets=10) - - _assert_projection_strict(BoundIsNull(term=bound_reference_binary), transform, AlwaysFalse) - _assert_projection_strict(BoundNotEqualTo(term=bound_reference_binary, literal=value), transform, NotEqualTo, "4") for expr in [BoundEqualTo, BoundLessThan, BoundLessThanOrEqual, BoundGreaterThan, BoundGreaterThanOrEqual]: @@ -1414,9 +1347,6 @@ def test_strict_bucket_bytes(bound_reference_binary: BoundReference[int]) -> Non def test_strict_bucket_uuid(bound_reference_uuid: BoundReference[int]) -> None: value = 
literal("00000000-0000-007b-0000-0000000001c8").to(UUIDType()) transform = BucketTransform(num_buckets=10) - - _assert_projection_strict(BoundIsNull(term=bound_reference_uuid), transform, AlwaysFalse) - _assert_projection_strict(BoundNotEqualTo(term=bound_reference_uuid, literal=value), transform, NotEqualTo, "4") for expr in [BoundEqualTo, BoundLessThan, BoundLessThanOrEqual, BoundGreaterThan, BoundGreaterThanOrEqual]: @@ -1629,7 +1559,7 @@ def test_ymd_pyarrow_transforms( ] else: with pytest.raises(ValueError): - transform.pyarrow_transform(source_type)(arrow_table_date_timestamps[source_col]) + transform.pyarrow_transform(DateType())(arrow_table_date_timestamps[source_col]) @pytest.mark.parametrize( @@ -1670,15 +1600,3 @@ def test_truncate_pyarrow_transforms( ) -> None: transform: Transform[Any, Any] = TruncateTransform(width=width) assert expected == transform.pyarrow_transform(source_type)(input_arr) - - -@pytest.mark.parametrize( - "transform", [BucketTransform(num_buckets=5), TruncateTransform(width=5), YearTransform(), MonthTransform(), DayTransform()] -) -def test_calling_pyarrow_transform_without_pyiceberg_core_installed_correctly_raises_not_imported_error( - transform, mocker: MockFixture -) -> None: - mocker.patch.dict("sys.modules", {"pyiceberg_core": None}) - - with pytest.raises(NotInstalledError): - transform.pyarrow_transform(StringType()) diff --git a/tests/test_typedef.py b/tests/test_typedef.py index fbbb619968..43388addca 100644 --- a/tests/test_typedef.py +++ b/tests/test_typedef.py @@ -16,7 +16,14 @@ # under the License. import pytest +from pyiceberg.schema import Schema from pyiceberg.typedef import FrozenDict, KeyDefaultDict, Record +from pyiceberg.types import ( + IntegerType, + NestedField, + StringType, + StructType, +) def test_setitem_frozendict() -> None: @@ -39,11 +46,44 @@ def one(_: int) -> int: assert defaultdict[22] == 1 -def test_record_named_args() -> None: +def test_record_repr(table_schema_simple: Schema) -> None: + r = Record("vo", 1, True, struct=table_schema_simple.as_struct()) + assert repr(r) == "Record[foo='vo', bar=1, baz=True]" + + +def test_named_record() -> None: + r = Record(struct=StructType(NestedField(0, "id", IntegerType()), NestedField(1, "name", StringType()))) + + with pytest.raises(AttributeError): + assert r.id is None # type: ignore + + with pytest.raises(AttributeError): + assert r.name is None # type: ignore + + r[0] = 123 + r[1] = "abc" + + assert r[0] == 123 + assert r[1] == "abc" + + assert r.id == 123 # type: ignore + assert r.name == "abc" # type: ignore + + +def test_record_positional_args() -> None: r = Record(1, "a", True) + assert repr(r) == "Record[field1=1, field2='a', field3=True]" + + +def test_record_named_args() -> None: + r = Record(foo=1, bar="a", baz=True) + + assert r.foo == 1 # type: ignore + assert r.bar == "a" # type: ignore + assert r.baz is True # type: ignore assert r[0] == 1 assert r[1] == "a" assert r[2] is True - assert repr(r) == "Record[1, a, True]" + assert repr(r) == "Record[foo=1, bar='a', baz=True]" diff --git a/tests/test_types.py b/tests/test_types.py index 18eb909d09..b19df17e08 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -62,21 +62,6 @@ (12, BinaryType), ] -primitive_types = { - "boolean": BooleanType, - "int": IntegerType, - "long": LongType, - "float": FloatType, - "double": DoubleType, - "date": DateType, - "time": TimeType, - "timestamp": TimestampType, - "timestamptz": TimestamptzType, - "string": StringType, - "uuid": UUIDType, - "binary": BinaryType, -} - 
@pytest.mark.parametrize("input_index, input_type", non_parameterized_types) def test_repr_primitive_types(input_index: int, input_type: Type[PrimitiveType]) -> None: @@ -240,34 +225,10 @@ def test_nested_field() -> None: assert str(field_var) == str(eval(repr(field_var))) assert field_var == pickle.loads(pickle.dumps(field_var)) - with pytest.raises(pydantic_core.ValidationError, match=".*validation errors for NestedField.*"): + with pytest.raises(pydantic_core.ValidationError) as exc_info: _ = (NestedField(1, "field", StringType(), required=True, write_default=(1, "a", True)),) # type: ignore - -def test_nested_field_complex_type_as_str_unsupported() -> None: - unsupported_types = ["list", "map", "struct"] - for type_str in unsupported_types: - with pytest.raises(ValueError) as exc_info: - _ = NestedField(1, "field", type_str, required=True) - assert f"Unsupported field type: '{type_str}'" in str(exc_info.value) - - -def test_nested_field_primitive_type_as_str() -> None: - for type_str, type_class in primitive_types.items(): - field_var = NestedField( - 1, - "field", - type_str, - required=True, - ) - assert isinstance(field_var.field_type, type_class), ( - f"Expected {type_class.__name__}, got {field_var.field_type.__class__.__name__}" - ) - - # Test that passing 'bool' raises a ValueError, as it should be 'boolean' - with pytest.raises(ValueError) as exc_info: - _ = NestedField(1, "field", "bool", required=True) - assert "Unsupported field type: 'bool'" in str(exc_info.value) + assert "validation errors for NestedField" in str(exc_info.value) @pytest.mark.parametrize("input_index,input_type", non_parameterized_types) @@ -525,21 +486,6 @@ def test_repr_decimal() -> None: assert repr(DecimalType(19, 25)) == "DecimalType(precision=19, scale=25)" -def test_repr_nested_field_default_nones_should_not_appear() -> None: - assert ( - repr(NestedField(1, "required_field", StringType(), required=False, initial_default=None, write_default=None)) - == "NestedField(field_id=1, name='required_field', field_type=StringType(), required=False)" - ) - assert ( - repr(NestedField(1, "required_field", StringType(), required=False, initial_default="hello", write_default=None)) - == "NestedField(field_id=1, name='required_field', field_type=StringType(), required=False, initial_default='hello')" - ) - assert ( - repr(NestedField(1, "required_field", StringType(), required=False, initial_default="hello", write_default="bye")) - == "NestedField(field_id=1, name='required_field', field_type=StringType(), required=False, initial_default='hello', write_default='bye')" - ) - - def test_serialization_nestedfield() -> None: expected = '{"id":1,"name":"required_field","type":"string","required":true,"doc":"this is a doc"}' actual = NestedField(1, "required_field", StringType(), True, "this is a doc").model_dump_json() diff --git a/tests/utils/test_config.py b/tests/utils/test_config.py index d70b0345f8..89247d8fca 100644 --- a/tests/utils/test_config.py +++ b/tests/utils/test_config.py @@ -56,14 +56,6 @@ def test_fix_nested_objects_from_environment_variables() -> None: } -@mock.patch.dict(os.environ, EXAMPLE_ENV) -@mock.patch.dict(os.environ, {"PYICEBERG_CATALOG__DEVELOPMENT__URI": "https://dev.service.io/api"}) -def test_list_all_known_catalogs() -> None: - catalogs = Config().get_known_catalogs() - assert "production" in catalogs - assert "development" in catalogs - - def test_from_configuration_files(tmp_path_factory: pytest.TempPathFactory) -> None: config_path = str(tmp_path_factory.mktemp("config")) with 
open(f"{config_path}/.pyiceberg.yaml", "w", encoding=UTF8) as file: diff --git a/tests/utils/test_datetime.py b/tests/utils/test_datetime.py index 6f6f4a9114..ac7ba54547 100644 --- a/tests/utils/test_datetime.py +++ b/tests/utils/test_datetime.py @@ -14,21 +14,12 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from datetime import datetime, time, timezone, tzinfo +from datetime import datetime, timezone, tzinfo import pytest import pytz -from pyiceberg.utils.datetime import ( - datetime_to_millis, - datetime_to_nanos, - millis_to_datetime, - nanos_to_micros, - time_str_to_nanos, - time_to_nanos, - timestamp_to_nanos, - timestamptz_to_nanos, -) +from pyiceberg.utils.datetime import datetime_to_millis, millis_to_datetime timezones = [ pytz.timezone("Etc/GMT"), @@ -80,55 +71,3 @@ def test_datetime_tz_to_millis(tz: tzinfo) -> None: def test_millis_to_datetime() -> None: assert millis_to_datetime(1690971805918) == datetime(2023, 8, 2, 10, 23, 25, 918000) - - -@pytest.mark.parametrize("time_str, nanos", [("00:00:00+00:00", 0), ("20:21:44.375612-05:00", 73304375612000)]) -def test_time_str_to_nanos(time_str: str, nanos: int) -> None: - assert nanos == time_str_to_nanos(time_str) - - -@pytest.mark.parametrize( - "time_, nanos", [(time(0, 0, 0), 0), (time(20, 21, 44, 375612, tzinfo=pytz.timezone("Etc/GMT-5")), 73304375612000)] -) -def test_time_to_nanos(time_: time, nanos: int) -> None: - assert nanos == time_to_nanos(time_) - - -@pytest.mark.parametrize( - "datetime_, nanos", - [ - (datetime(1970, 1, 1, 0, 0, 0), 0), - (datetime(2025, 2, 23, 20, 21, 44, 375612, tzinfo=pytz.timezone("Etc/GMT-5")), 1740324104375612000), - ], -) -def test_datetime_to_nanos(datetime_: datetime, nanos: int) -> None: - assert nanos == datetime_to_nanos(datetime_) - - -@pytest.mark.parametrize( - "timestamp, nanos", - [ - ("1970-01-01T00:00:00", 0), - ("2025-02-23T20:21:44.375612", 1740342104375612000), - ("2025-02-23T20:21:44.375612001", 1740342104375612001), - ], -) -def test_timestamp_to_nanos(timestamp: str, nanos: int) -> None: - assert nanos == timestamp_to_nanos(timestamp) - - -@pytest.mark.parametrize( - "timestamp, nanos", - [ - ("1970-01-01T00:00:00+00:00", 0), - ("2025-02-23T16:21:44.375612-04:00", 1740342104375612000), - ("2025-02-23T16:21:44.375612001-04:00", 1740342104375612001), - ], -) -def test_timestamptz_to_nanos(timestamp: str, nanos: int) -> None: - assert nanos == timestamptz_to_nanos(timestamp) - - -@pytest.mark.parametrize("nanos, micros", [(1510871468000001001, 1510871468000001), (-1510871468000001001, -1510871468000002)]) -def test_nanos_to_micros(nanos: int, micros: int) -> None: - assert micros == nanos_to_micros(nanos) diff --git a/tests/utils/test_manifest.py b/tests/utils/test_manifest.py index 7c62b9564c..3b1fc6f013 100644 --- a/tests/utils/test_manifest.py +++ b/tests/utils/test_manifest.py @@ -22,7 +22,6 @@ import fastavro import pytest -from pyiceberg.avro.codecs import AvroCompressionCodec from pyiceberg.io import load_file_io from pyiceberg.io.pyarrow import PyArrowFileIO from pyiceberg.manifest import ( @@ -38,9 +37,10 @@ write_manifest, write_manifest_list, ) -from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec +from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionField, PartitionSpec from pyiceberg.schema import Schema from pyiceberg.table.snapshots import Operation, Snapshot, Summary +from pyiceberg.transforms import IdentityTransform from 
pyiceberg.typedef import Record, TableVersion from pyiceberg.types import IntegerType, NestedField @@ -61,7 +61,7 @@ def _verify_metadata_with_fastavro(avro_file: str, expected_metadata: Dict[str, def test_read_manifest_entry(generated_manifest_entry_file: str) -> None: - manifest = ManifestFile.from_args( + manifest = ManifestFile( manifest_path=generated_manifest_entry_file, manifest_length=0, partition_spec_id=0, @@ -79,13 +79,13 @@ def test_read_manifest_entry(generated_manifest_entry_file: str) -> None: data_file = manifest_entry.data_file - assert data_file.content == DataFileContent.DATA + assert data_file.content is DataFileContent.DATA assert ( data_file.file_path == "/home/iceberg/warehouse/nyc/taxis_partitioned/data/VendorID=null/00000-633-d8a4223e-dc97-45a1-86e1-adaba6e8abd7-00001.parquet" ) assert data_file.file_format == FileFormat.PARQUET - assert repr(data_file.partition) == "Record[1, 1925]" + assert repr(data_file.partition) == "Record[VendorID=1, tpep_pickup_datetime=1925]" assert data_file.record_count == 19513 assert data_file.file_size_in_bytes == 388872 assert data_file.column_sizes == { @@ -153,8 +153,8 @@ def test_read_manifest_entry(generated_manifest_entry_file: str) -> None: } assert data_file.nan_value_counts == {16: 0, 17: 0, 18: 0, 19: 0, 10: 0, 11: 0, 12: 0, 13: 0, 14: 0, 15: 0} assert data_file.lower_bounds == { - 2: b"\x01\x00\x00\x00\x00\x00\x00\x00", - 3: b"\x01\x00\x00\x00\x00\x00\x00\x00", + 2: b"2020-04-01 00:00", + 3: b"2020-04-01 00:12", 7: b"\x03\x00\x00\x00", 8: b"\x01\x00\x00\x00", 10: b"\xf6(\\\x8f\xc2\x05S\xc0", @@ -168,8 +168,8 @@ def test_read_manifest_entry(generated_manifest_entry_file: str) -> None: 19: b"\x00\x00\x00\x00\x00\x00\x04\xc0", } assert data_file.upper_bounds == { - 2: b"\x06\x00\x00\x00\x00\x00\x00\x00", - 3: b"\x06\x00\x00\x00\x00\x00\x00\x00", + 2: b"2020-04-30 23:5:", + 3: b"2020-05-01 00:41", 7: b"\t\x01\x00\x00", 8: b"\t\x01\x00\x00", 10: b"\xcd\xcc\xcc\xcc\xcc,_@", @@ -351,20 +351,13 @@ def test_write_empty_manifest() -> None: schema=test_schema, output_file=io.new_output(tmp_avro_file), snapshot_id=8744736658442914487, - avro_compression="deflate", ) as _: pass @pytest.mark.parametrize("format_version", [1, 2]) -@pytest.mark.parametrize("compression", ["null", "deflate", "zstd"]) def test_write_manifest( - generated_manifest_file_file_v1: str, - generated_manifest_file_file_v2: str, - format_version: TableVersion, - test_schema: Schema, - test_partition_spec: PartitionSpec, - compression: AvroCompressionCodec, + generated_manifest_file_file_v1: str, generated_manifest_file_file_v2: str, format_version: TableVersion ) -> None: io = load_file_io() snapshot = Snapshot( @@ -377,16 +370,23 @@ def test_write_manifest( ) demo_manifest_file = snapshot.manifests(io)[0] manifest_entries = demo_manifest_file.fetch_manifest_entry(io) + test_schema = Schema( + NestedField(1, "VendorID", IntegerType(), False), NestedField(2, "tpep_pickup_datetime", IntegerType(), False) + ) + test_spec = PartitionSpec( + PartitionField(source_id=1, field_id=1, transform=IdentityTransform(), name="VendorID"), + PartitionField(source_id=2, field_id=2, transform=IdentityTransform(), name="tpep_pickup_datetime"), + spec_id=demo_manifest_file.partition_spec_id, + ) with TemporaryDirectory() as tmpdir: tmp_avro_file = tmpdir + "/test_write_manifest.avro" output = io.new_output(tmp_avro_file) with write_manifest( format_version=format_version, - spec=test_partition_spec, + spec=test_spec, schema=test_schema, output_file=output, 
snapshot_id=8744736658442914487, - avro_compression=compression, ) as writer: for entry in manifest_entries: writer.add_entry(entry) @@ -397,7 +397,7 @@ def test_write_manifest( expected_metadata = { "schema": test_schema.model_dump_json(), - "partition-spec": """[{"source-id":1,"field-id":1000,"transform":"identity","name":"VendorID"},{"source-id":2,"field-id":1001,"transform":"day","name":"tpep_pickup_day"}]""", + "partition-spec": """[{"source-id":1,"field-id":1,"transform":"identity","name":"VendorID"},{"source-id":2,"field-id":2,"transform":"identity","name":"tpep_pickup_datetime"}]""", "partition-spec-id": str(demo_manifest_file.partition_spec_id), "format-version": str(format_version), } @@ -416,13 +416,13 @@ def test_write_manifest( data_file = manifest_entry.data_file - assert data_file.content == DataFileContent.DATA + assert data_file.content is DataFileContent.DATA assert ( data_file.file_path == "/home/iceberg/warehouse/nyc/taxis_partitioned/data/VendorID=null/00000-633-d8a4223e-dc97-45a1-86e1-adaba6e8abd7-00001.parquet" ) assert data_file.file_format == FileFormat.PARQUET - assert data_file.partition == Record(1, 1925) + assert data_file.partition == Record(VendorID=1, tpep_pickup_datetime=1925) assert data_file.record_count == 19513 assert data_file.file_size_in_bytes == 388872 assert data_file.column_sizes == { @@ -490,8 +490,8 @@ def test_write_manifest( } assert data_file.nan_value_counts == {16: 0, 17: 0, 18: 0, 19: 0, 10: 0, 11: 0, 12: 0, 13: 0, 14: 0, 15: 0} assert data_file.lower_bounds == { - 2: b"\x01\x00\x00\x00\x00\x00\x00\x00", - 3: b"\x01\x00\x00\x00\x00\x00\x00\x00", + 2: b"2020-04-01 00:00", + 3: b"2020-04-01 00:12", 7: b"\x03\x00\x00\x00", 8: b"\x01\x00\x00\x00", 10: b"\xf6(\\\x8f\xc2\x05S\xc0", @@ -505,8 +505,8 @@ def test_write_manifest( 19: b"\x00\x00\x00\x00\x00\x00\x04\xc0", } assert data_file.upper_bounds == { - 2: b"\x06\x00\x00\x00\x00\x00\x00\x00", - 3: b"\x06\x00\x00\x00\x00\x00\x00\x00", + 2: b"2020-04-30 23:5:", + 3: b"2020-05-01 00:41", 7: b"\t\x01\x00\x00", 8: b"\t\x01\x00\x00", 10: b"\xcd\xcc\xcc\xcc\xcc,_@", @@ -527,13 +527,11 @@ def test_write_manifest( @pytest.mark.parametrize("format_version", [1, 2]) @pytest.mark.parametrize("parent_snapshot_id", [19, None]) -@pytest.mark.parametrize("compression", ["null", "deflate"]) def test_write_manifest_list( generated_manifest_file_file_v1: str, generated_manifest_file_file_v2: str, format_version: TableVersion, parent_snapshot_id: Optional[int], - compression: AvroCompressionCodec, ) -> None: io = load_file_io() @@ -556,7 +554,6 @@ def test_write_manifest_list( snapshot_id=25, parent_snapshot_id=parent_snapshot_id, sequence_number=0, - avro_compression=compression, ) as writer: writer.add_manifests(demo_manifest_list) new_manifest_list = list(read_manifest_list(io.new_input(path))) @@ -624,9 +621,9 @@ def test_write_manifest_list( def test_file_format_case_insensitive(raw_file_format: str, expected_file_format: FileFormat) -> None: if expected_file_format: parsed_file_format = FileFormat(raw_file_format) - assert parsed_file_format == expected_file_format, ( - f"File format {raw_file_format}: {parsed_file_format} != {expected_file_format}" - ) + assert ( + parsed_file_format == expected_file_format + ), f"File format {raw_file_format}: {parsed_file_format} != {expected_file_format}" else: with pytest.raises(ValueError): _ = FileFormat(raw_file_format) diff --git a/tests/utils/test_schema_conversion.py b/tests/utils/test_schema_conversion.py index eb44dcdff3..2c42c445e4 100644 --- 
a/tests/utils/test_schema_conversion.py +++ b/tests/utils/test_schema_conversion.py @@ -33,9 +33,6 @@ NestedField, StringType, StructType, - TimestampType, - UnknownType, - UUIDType, ) from pyiceberg.utils.schema_conversion import AvroSchemaConversion @@ -266,19 +263,19 @@ def test_fixed_type() -> None: def test_unknown_primitive() -> None: - avro_type = "null" - actual = AvroSchemaConversion()._convert_schema(avro_type) - expected = UnknownType() - assert actual == expected + with pytest.raises(TypeError) as exc_info: + avro_type = "UnknownType" + AvroSchemaConversion()._convert_schema(avro_type) + assert "Unknown type: UnknownType" in str(exc_info.value) -def test_unrecognized_complex_type() -> None: +def test_unknown_complex_type() -> None: with pytest.raises(TypeError) as exc_info: avro_type = { - "type": "UnrecognizedType", + "type": "UnknownType", } AvroSchemaConversion()._convert_schema(avro_type) - assert "Type not recognized: {'type': 'UnrecognizedType'}" in str(exc_info.value) + assert "Unknown type: {'type': 'UnknownType'}" in str(exc_info.value) def test_convert_field_without_field_id() -> None: @@ -329,24 +326,6 @@ def test_convert_date_type() -> None: assert actual == DateType() -def test_convert_uuid_str_type() -> None: - avro_logical_type = {"type": "string", "logicalType": "uuid"} - actual = AvroSchemaConversion()._convert_logical_type(avro_logical_type) - assert actual == UUIDType() - - -def test_convert_uuid_fixed_type() -> None: - avro_logical_type = {"type": "fixed", "logicalType": "uuid"} - actual = AvroSchemaConversion()._convert_logical_type(avro_logical_type) - assert actual == UUIDType() - - -def test_convert_timestamp_micros_type() -> None: - avro_logical_type = {"type": "int", "logicalType": "timestamp-micros"} - actual = AvroSchemaConversion()._convert_logical_type(avro_logical_type) - assert actual == TimestampType() - - def test_unknown_logical_type() -> None: """Test raising a ValueError when converting an unknown logical type as part of an Avro schema conversion""" avro_logical_type = {"type": "bytes", "logicalType": "date"} diff --git a/vendor/Makefile b/vendor/Makefile deleted file mode 100644 index 57a135ed08..0000000000 --- a/vendor/Makefile +++ /dev/null @@ -1,40 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# Makefile for generating vendor packages - -.PHONY: all clean fb303 hive-metastore - -all: fb303 hive-metastore - -# FB303 Thrift client generation -fb303: - rm -f /tmp/fb303.thrift - rm -rf fb303 - curl -s https://raw.githubusercontent.com/apache/thrift/master/contrib/fb303/if/fb303.thrift > /tmp/fb303.thrift - rm -rf /tmp/gen-py/ - thrift -gen py -o /tmp/ /tmp/fb303.thrift - mv /tmp/gen-py/fb303 fb303 - -# Hive Metastore Thrift definition generation -hive-metastore: - rm -rf /tmp/hive - mkdir -p /tmp/hive/share/fb303/if/ - curl -s https://raw.githubusercontent.com/apache/thrift/master/contrib/fb303/if/fb303.thrift > /tmp/hive/share/fb303/if/fb303.thrift - curl -s https://raw.githubusercontent.com/apache/hive/master/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift > /tmp/hive/hive_metastore.thrift - thrift -gen py -o /tmp/hive /tmp/hive/hive_metastore.thrift - rm -rf hive_metastore - mv /tmp/hive/gen-py/hive_metastore hive_metastore diff --git a/vendor/README.md b/vendor/README.md index 8f033769fe..0b55d9e5c6 100644 --- a/vendor/README.md +++ b/vendor/README.md @@ -18,42 +18,28 @@ Some packages we want to maintain in the repository itself, because there is no good 3rd party alternative. -## Quick Setup - -Generate all vendor packages: - -```bash -make all -``` - -Generate individual packages: - -```bash -make fb303 # FB303 Thrift client only -make hive-metastore # Hive Metastore Thrift definitions only -``` - -## Packages - -### FB303 Thrift client +## FB303 Thrift client fb303 is a base Thrift service and a common set of functionality for querying stats, options, and other information from a service. -**Generate with Make:** ```bash -make fb303 +rm -f /tmp/fb303.thrift +rm -rf fb303 +curl -s https://raw.githubusercontent.com/apache/thrift/master/contrib/fb303/if/fb303.thrift > /tmp/fb303.thrift +rm -rf /tmp/gen-py/ +thrift -gen py -o /tmp/ /tmp/fb303.thrift +mv /tmp/gen-py/fb303 fb303 ``` # Hive Metastore Thrift definition The thrift definition require the fb303 service as a dependency -**Generate with Make:** ```bash -make hive-metastore +rm -rf /tmp/hive +mkdir -p /tmp/hive/share/fb303/if/ +curl -s https://raw.githubusercontent.com/apache/thrift/master/contrib/fb303/if/fb303.thrift > /tmp/hive/share/fb303/if/fb303.thrift +curl -s https://raw.githubusercontent.com/apache/hive/master/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift > /tmp/hive/hive_metastore.thrift +thrift -gen py -o /tmp/hive /tmp/hive/hive_metastore.thrift +mv /tmp/hive/gen-py/hive_metastore hive_metastore ``` - -## Requirements - -- Apache Thrift compiler (`thrift`) -- `curl` for downloading Thrift definitions \ No newline at end of file