diff --git a/buf.gen.yaml b/buf.gen.yaml new file mode 100644 index 0000000..90932ea --- /dev/null +++ b/buf.gen.yaml @@ -0,0 +1,7 @@ +version: v2 +plugins: + # NOTE: v26.0 is the earliest version supporting protobuf==5. + - remote: buf.build/protocolbuffers/python:v26.0 + out: src/connectrpc/_gen + - remote: buf.build/protocolbuffers/pyi:v26.0 + out: src/connectrpc/_gen diff --git a/buf.yaml b/buf.yaml new file mode 100644 index 0000000..d374062 --- /dev/null +++ b/buf.yaml @@ -0,0 +1,3 @@ +version: v2 +modules: +- path: proto diff --git a/conformance/test/config.yaml b/conformance/test/config.yaml index a29944a..91d2cc2 100644 --- a/conformance/test/config.yaml +++ b/conformance/test/config.yaml @@ -4,6 +4,7 @@ features: - HTTP_VERSION_2 protocols: - PROTOCOL_CONNECT + - PROTOCOL_GRPC codecs: - CODEC_PROTO - CODEC_JSON @@ -21,7 +22,7 @@ features: supports_h2c: true supports_tls: true supports_tls_client_certs: true - supports_trailers: false + supports_trailers: true supports_half_duplex_bidi_over_http1: true supports_connect_get: true # This currently only tests the server diff --git a/conformance/test/test_client.py b/conformance/test/test_client.py index b30586f..22426ca 100644 --- a/conformance/test/test_client.py +++ b/conformance/test/test_client.py @@ -12,8 +12,22 @@ _client_py_path = str(_current_dir / "client.py") _config_path = str(_current_dir / "config.yaml") +_skipped_tests = [ + # Not implemented yet, + "--skip", + "**/Protocol:PROTOCOL_GRPC/**", + "--skip", + "gRPC Trailers/**", + "--skip", + "gRPC Unexpected Responses/**", + "--skip", + "gRPC Empty Responses/**", + "--skip", + "gRPC Proto Sub-Format Responses/**", +] _skipped_tests_sync = [ + *_skipped_tests, # Need to use async APIs for proper cancellation support in Python. "--skip", "Client Cancellation/**", @@ -46,6 +60,7 @@ def test_client_sync() -> None: _skipped_tests_async = [ + *_skipped_tests, # Cancellation currently not working for full duplex "--skip", "Client Cancellation/**/full-duplex/**", diff --git a/conformance/test/test_server.py b/conformance/test/test_server.py index 459cfb9..d44a123 100644 --- a/conformance/test/test_server.py +++ b/conformance/test/test_server.py @@ -41,9 +41,6 @@ def macos_raise_ulimit(): @pytest.mark.parametrize("server", ["gunicorn", "pyvoy"]) def test_server_sync(server: str) -> None: - if server == "pyvoy" and sys.platform == "win32": - pytest.skip("pyvoy not supported on Windows") - args = maybe_patch_args_with_debug( [sys.executable, _server_py_path, "--mode", "sync", "--server", server] ) @@ -77,9 +74,6 @@ def test_server_sync(server: str) -> None: @pytest.mark.parametrize("server", ["daphne", "pyvoy", "uvicorn"]) def test_server_async(server: str) -> None: - if server == "pyvoy" and sys.platform == "win32": - pytest.skip("pyvoy not supported on Windows") - args = maybe_patch_args_with_debug( [sys.executable, _server_py_path, "--mode", "async", "--server", server] ) @@ -94,6 +88,13 @@ def test_server_async(server: str) -> None: # it only works with websockets "--skip", "**/full-duplex/**", + # daphne doesn't support trailers + "--skip", + "**/Protocol:PROTOCOL_GRPC/**", + "--skip", + "gRPC Proto Sub-Format Requests/**", + "--skip", + "gRPC Unexpected Requests/**", ] case "uvicorn": # uvicorn doesn't support HTTP/2 diff --git a/example/buf.gen.yaml b/example/buf.gen.yaml index 4540366..505d9c0 100644 --- a/example/buf.gen.yaml +++ b/example/buf.gen.yaml @@ -5,6 +5,8 @@ plugins: out: . - remote: buf.build/protocolbuffers/pyi:v26.0 out: . 
+ - remote: buf.build/grpc/python:v1.76.0 + out: . - local: - go - run diff --git a/example/example/eliza_pb2_grpc.py b/example/example/eliza_pb2_grpc.py new file mode 100644 index 0000000..8c9c709 --- /dev/null +++ b/example/example/eliza_pb2_grpc.py @@ -0,0 +1,207 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" + +from __future__ import annotations + +import grpc + +from example import eliza_pb2 as example_dot_eliza__pb2 + + +class ElizaServiceStub: + """ElizaService provides a way to talk to Eliza, a port of the DOCTOR script + for Joseph Weizenbaum's original ELIZA program. Created in the mid-1960s at + the MIT Artificial Intelligence Laboratory, ELIZA demonstrates the + superficiality of human-computer communication. DOCTOR simulates a + psychotherapist, and is commonly found as an Easter egg in emacs + distributions. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Say = channel.unary_unary( + "/connectrpc.eliza.v1.ElizaService/Say", + request_serializer=example_dot_eliza__pb2.SayRequest.SerializeToString, + response_deserializer=example_dot_eliza__pb2.SayResponse.FromString, + _registered_method=True, + ) + self.Converse = channel.stream_stream( + "/connectrpc.eliza.v1.ElizaService/Converse", + request_serializer=example_dot_eliza__pb2.ConverseRequest.SerializeToString, + response_deserializer=example_dot_eliza__pb2.ConverseResponse.FromString, + _registered_method=True, + ) + self.Introduce = channel.unary_stream( + "/connectrpc.eliza.v1.ElizaService/Introduce", + request_serializer=example_dot_eliza__pb2.IntroduceRequest.SerializeToString, + response_deserializer=example_dot_eliza__pb2.IntroduceResponse.FromString, + _registered_method=True, + ) + + +class ElizaServiceServicer: + """ElizaService provides a way to talk to Eliza, a port of the DOCTOR script + for Joseph Weizenbaum's original ELIZA program. Created in the mid-1960s at + the MIT Artificial Intelligence Laboratory, ELIZA demonstrates the + superficiality of human-computer communication. DOCTOR simulates a + psychotherapist, and is commonly found as an Easter egg in emacs + distributions. + """ + + def Say(self, request, context): + """Say is a unary RPC. Eliza responds to the prompt with a single sentence.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + msg = "Method not implemented!" + raise NotImplementedError(msg) + + def Converse(self, request_iterator, context): + """Converse is a bidirectional RPC. The caller may exchange multiple + back-and-forth messages with Eliza over a long-lived connection. Eliza + responds to each ConverseRequest with a ConverseResponse. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + msg = "Method not implemented!" + raise NotImplementedError(msg) + + def Introduce(self, request, context): + """Introduce is a server streaming RPC. Given the caller's name, Eliza + returns a stream of sentences to introduce itself. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + msg = "Method not implemented!" 
+ raise NotImplementedError(msg) + + +def add_ElizaServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "Say": grpc.unary_unary_rpc_method_handler( + servicer.Say, + request_deserializer=example_dot_eliza__pb2.SayRequest.FromString, + response_serializer=example_dot_eliza__pb2.SayResponse.SerializeToString, + ), + "Converse": grpc.stream_stream_rpc_method_handler( + servicer.Converse, + request_deserializer=example_dot_eliza__pb2.ConverseRequest.FromString, + response_serializer=example_dot_eliza__pb2.ConverseResponse.SerializeToString, + ), + "Introduce": grpc.unary_stream_rpc_method_handler( + servicer.Introduce, + request_deserializer=example_dot_eliza__pb2.IntroduceRequest.FromString, + response_serializer=example_dot_eliza__pb2.IntroduceResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "connectrpc.eliza.v1.ElizaService", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers( + "connectrpc.eliza.v1.ElizaService", rpc_method_handlers + ) + + +# This class is part of an EXPERIMENTAL API. +class ElizaService: + """ElizaService provides a way to talk to Eliza, a port of the DOCTOR script + for Joseph Weizenbaum's original ELIZA program. Created in the mid-1960s at + the MIT Artificial Intelligence Laboratory, ELIZA demonstrates the + superficiality of human-computer communication. DOCTOR simulates a + psychotherapist, and is commonly found as an Easter egg in emacs + distributions. + """ + + @staticmethod + def Say( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/connectrpc.eliza.v1.ElizaService/Say", + example_dot_eliza__pb2.SayRequest.SerializeToString, + example_dot_eliza__pb2.SayResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) + + @staticmethod + def Converse( + request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.stream_stream( + request_iterator, + target, + "/connectrpc.eliza.v1.ElizaService/Converse", + example_dot_eliza__pb2.ConverseRequest.SerializeToString, + example_dot_eliza__pb2.ConverseResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) + + @staticmethod + def Introduce( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_stream( + request, + target, + "/connectrpc.eliza.v1.ElizaService/Introduce", + example_dot_eliza__pb2.IntroduceRequest.SerializeToString, + example_dot_eliza__pb2.IntroduceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/justfile b/justfile index 8f98457..16e2dfe 100644 --- a/justfile +++ b/justfile @@ -45,6 +45,10 @@ docs: docs-serve: docs uv run python -m http.server 8000 +# Generate gRPC status +generate-status: + go 
run github.com/bufbuild/buf/cmd/buf@{{BUF_VERSION}} generate + # Generate conformance files [working-directory: 'conformance'] generate-conformance: @@ -65,7 +69,7 @@ generate-test: go run github.com/bufbuild/buf/cmd/buf@{{BUF_VERSION}} generate # Run all generation targets, and format the generated code -generate: generate-conformance generate-example generate-test format +generate: generate-conformance generate-example generate-status generate-test format # Used in CI to verify that `just generate` doesn't produce a diff checkgenerate: generate diff --git a/proto/status.proto b/proto/status.proto new file mode 100644 index 0000000..0737367 --- /dev/null +++ b/proto/status.proto @@ -0,0 +1,91 @@ +// Copyright 2021-2025 The Connect Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc; + +import "google/protobuf/any.proto"; + +option go_package = "google.golang.org/genproto/googleapis/rpc/status;status"; +option java_multiple_files = true; +option java_outer_classname = "StatusProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// The `Status` type defines a logical error model that is suitable for different +// programming environments, including REST APIs and RPC APIs. It is used by +// [gRPC](https://github.com/grpc). The error model is designed to be: +// +// - Simple to use and understand for most users +// - Flexible enough to meet unexpected needs +// +// # Overview +// +// The `Status` message contains three pieces of data: error code, error message, +// and error details. The error code should be an enum value of +// [google.rpc.Code][google.rpc.Code], but it may accept additional error codes if needed. The +// error message should be a developer-facing English message that helps +// developers *understand* and *resolve* the error. If a localized user-facing +// error message is needed, put the localized message in the error details or +// localize it in the client. The optional error details may contain arbitrary +// information about the error. There is a predefined set of error detail types +// in the package `google.rpc` which can be used for common error conditions. +// +// # Language mapping +// +// The `Status` message is the logical representation of the error model, but it +// is not necessarily the actual wire format. When the `Status` message is +// exposed in different client libraries and different wire protocols, it can be +// mapped differently. For example, it will likely be mapped to some exceptions +// in Java, but more likely mapped to some error codes in C. +// +// # Other uses +// +// The error model and the `Status` message can be used in a variety of +// environments, either with or without APIs, to provide a +// consistent developer experience across different environments. +// +// Example uses of this error model include: +// +// - Partial errors. 
If a service needs to return partial errors to the client, +// it may embed the `Status` in the normal response to indicate the partial +// errors. +// +// - Workflow errors. A typical workflow has multiple steps. Each step may +// have a `Status` message for error reporting purpose. +// +// - Batch operations. If a client uses batch request and batch response, the +// `Status` message should be used directly inside batch response, one for +// each error sub-response. +// +// - Asynchronous operations. If an API call embeds asynchronous operation +// results in its response, the status of those operations should be +// represented directly using the `Status` message. +// +// - Logging. If some API errors are stored in logs, the message `Status` could +// be used directly after any stripping needed for security/privacy reasons. +message Status { + // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. + int32 code = 1; + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. + string message = 2; + + // A list of messages that carry the error details. There will be a + // common set of message types for APIs to use. + repeated google.protobuf.Any details = 3; +} diff --git a/pyproject.toml b/pyproject.toml index f417098..932b16c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ dev = [ "httpx[http2]==0.28.1", "hypercorn==0.17.3", "granian==2.5.7", + "grpcio-tools==1.76.0", "gunicorn[gevent]==23.0.0", "just-bin==1.42.4; sys_platform != 'win32'", "mkdocs==1.6.1", @@ -51,7 +52,7 @@ dev = [ "mkdocstrings[python]==0.30.1", "pyright[nodejs]==1.1.405", "pytest-timeout==2.4.0", - "pyvoy==0.1.2; sys_platform != 'win32'", + "pyvoy==0.2.0", "ruff~=0.13.2", "uvicorn==0.37.0", # Needed to enable HTTP/2 in daphne @@ -214,6 +215,7 @@ typing-extensions = false "PERF", "D", ] +"**/*_grpc.py" = ["N", "FBT"] [tool.ruff.lint.isort] required-imports = ["from __future__ import annotations"] diff --git a/src/connectrpc/_client_async.py b/src/connectrpc/_client_async.py index 550ae2c..d8fcfc3 100644 --- a/src/connectrpc/_client_async.py +++ b/src/connectrpc/_client_async.py @@ -11,7 +11,7 @@ from . 
import _client_shared from ._asyncio_timeout import timeout as asyncio_timeout from ._codec import Codec, get_proto_binary_codec, get_proto_json_codec -from ._envelope import EnvelopeReader, EnvelopeWriter +from ._envelope import EnvelopeReader from ._interceptor_async import ( BidiStreamInterceptor, ClientStreamInterceptor, @@ -20,7 +20,12 @@ UnaryInterceptor, resolve_interceptors, ) -from ._protocol import CONNECT_STREAMING_HEADER_COMPRESSION, ConnectWireError +from ._protocol import ConnectWireError +from ._protocol_connect import ( + CONNECT_STREAMING_HEADER_COMPRESSION, + ConnectEnvelopeWriter, +) +from ._response_metadata import handle_response_headers from .code import Code from .errors import ConnectError @@ -306,7 +311,7 @@ async def _send_request_unary( resp.status_code, resp.headers.get("content-type", ""), ) - _client_shared.handle_response_headers(resp.headers) + handle_response_headers(resp.headers) if resp.status_code == 200: if ( @@ -376,7 +381,7 @@ async def _send_request_bidi_stream( _client_shared.validate_stream_response_content_type( self._codec.name(), resp.headers.get("content-type", "") ) - _client_shared.handle_response_headers(resp.headers) + handle_response_headers(resp.headers) if resp.status_code == 200: reader = EnvelopeReader( @@ -416,7 +421,7 @@ def _convert_connect_timeout(timeout_ms: float | None) -> Timeout: async def _streaming_request_content( msgs: AsyncIterator[Any], codec: Codec, compression: Compression | None ) -> AsyncIterator[bytes]: - writer = EnvelopeWriter(codec, compression) + writer = ConnectEnvelopeWriter(codec, compression) async for msg in msgs: yield writer.write(msg) diff --git a/src/connectrpc/_client_shared.py b/src/connectrpc/_client_shared.py index 2feef32..18cb166 100644 --- a/src/connectrpc/_client_shared.py +++ b/src/connectrpc/_client_shared.py @@ -1,15 +1,14 @@ from __future__ import annotations import base64 -import contextlib -from contextvars import ContextVar, Token from http import HTTPStatus from typing import TYPE_CHECKING, TypeVar from . 
import _compression from ._codec import CODEC_NAME_JSON, CODEC_NAME_JSON_CHARSET_UTF8, Codec from ._compression import Compression, get_available_compressions, get_compression -from ._protocol import ( +from ._protocol import ConnectWireError +from ._protocol_connect import ( CONNECT_PROTOCOL_VERSION, CONNECT_STREAMING_CONTENT_TYPE_PREFIX, CONNECT_STREAMING_HEADER_ACCEPT_COMPRESSION, @@ -17,7 +16,6 @@ CONNECT_UNARY_CONTENT_TYPE_PREFIX, CONNECT_UNARY_HEADER_ACCEPT_COMPRESSION, CONNECT_UNARY_HEADER_COMPRESSION, - ConnectWireError, codec_name_from_content_type, ) from ._version import __version__ @@ -26,8 +24,7 @@ from .request import Headers, RequestContext if TYPE_CHECKING: - from collections.abc import Iterable, Mapping, Sequence - from types import TracebackType + from collections.abc import Iterable, Mapping from httpx import Headers as HttpxHeaders @@ -123,9 +120,6 @@ def prepare_get_params( return params -_current_response = ContextVar["ResponseMetadata"]("connectrpc_current_response") - - def validate_response_content_encoding( encoding: str | None, ) -> _compression.Compression: @@ -197,89 +191,3 @@ def validate_stream_response_content_type( Code.INTERNAL, f"invalid content-type: '{response_content_type}'; expecting '{CONNECT_STREAMING_CONTENT_TYPE_PREFIX}{request_codec_name}'", ) - - -def handle_response_headers(headers: HttpxHeaders) -> None: - response = _current_response.get(None) - if not response: - return - - response_headers: Headers = Headers() - response_trailers: Headers = Headers() - for key, value in headers.multi_items(): - if key.startswith("trailer-"): - normalized_key = key[len("trailer-") :] - obj = response_trailers - else: - normalized_key = key - obj = response_headers - obj.add(normalized_key, value) - if response_headers: - response._headers = response_headers # noqa: SLF001 - if response_trailers: - response._trailers = response_trailers # noqa: SLF001 - - -def handle_response_trailers(trailers: Mapping[str, Sequence[str]]) -> None: - response = _current_response.get(None) - if not response: - return - response_trailers = response.trailers() - for key, values in trailers.items(): - for value in values: - response_trailers.add(key, value) - if response_trailers: - response._trailers = response_trailers # noqa: SLF001 - - -class ResponseMetadata: - """ - Response metadata separate from the message payload. - - Commonly, RPC client invocations only need the message payload and do not need to - directly read other data such as headers or trailers. In cases where they are needed, - initialize this class in a context manager to access the response headers and trailers - for the invocation made within the context. - - Example: - - with ResponseMetadata() as resp_data: - resp = client.MakeHat(Size(inches=10)) - do_something_with_response_payload(resp) - check_response_headers(resp_data.headers()) - check_response_trailers(resp_data.trailers()) - """ - - _headers: Headers | None = None - _trailers: Headers | None = None - _token: Token[ResponseMetadata] | None = None - - def __enter__(self) -> ResponseMetadata: - self._token = _current_response.set(self) - return self - - def __exit__( - self, - _exc_type: type[BaseException] | None, - _exc_value: BaseException | None, - _traceback: TracebackType | None, - ) -> None: - if self._token: - # Normal usage with context manager will always work but it is - # theoretically possible for user to move to another thread - # and this fails, it is fine to ignore it. 
- with contextlib.suppress(Exception): - _current_response.reset(self._token) - self._token = None - - def headers(self) -> Headers: - """Returns the response headers.""" - if self._headers is None: - return Headers() - return self._headers - - def trailers(self) -> Headers: - """Returns the response trailers.""" - if self._trailers is None: - return Headers() - return self._trailers diff --git a/src/connectrpc/_client_sync.py b/src/connectrpc/_client_sync.py index b37f86f..bbf5d70 100644 --- a/src/connectrpc/_client_sync.py +++ b/src/connectrpc/_client_sync.py @@ -8,7 +8,7 @@ from . import _client_shared from ._codec import Codec, get_proto_binary_codec, get_proto_json_codec -from ._envelope import EnvelopeReader, EnvelopeWriter +from ._envelope import EnvelopeReader from ._interceptor_sync import ( BidiStreamInterceptorSync, ClientStreamInterceptorSync, @@ -17,7 +17,12 @@ UnaryInterceptorSync, resolve_interceptors, ) -from ._protocol import CONNECT_STREAMING_HEADER_COMPRESSION, ConnectWireError +from ._protocol import ConnectWireError +from ._protocol_connect import ( + CONNECT_STREAMING_HEADER_COMPRESSION, + ConnectEnvelopeWriter, +) +from ._response_metadata import handle_response_headers from .code import Code from .errors import ConnectError @@ -290,7 +295,7 @@ def _send_request_unary(self, request: REQ, ctx: RequestContext[REQ, RES]) -> RE resp.status_code, resp.headers.get("content-type", ""), ) - _client_shared.handle_response_headers(resp.headers) + handle_response_headers(resp.headers) if resp.status_code == 200: if ( @@ -352,7 +357,7 @@ def _send_request_bidi_stream( _client_shared.validate_stream_response_content_type( self._codec.name(), resp.headers.get("content-type", "") ) - _client_shared.handle_response_headers(resp.headers) + handle_response_headers(resp.headers) if resp.status_code == 200: reader = EnvelopeReader( @@ -397,7 +402,7 @@ def _convert_connect_timeout(timeout_ms: float | None) -> Timeout: def _streaming_request_content( msgs: Iterator[Any], codec: Codec, compression: Compression | None ) -> Iterator[bytes]: - writer = EnvelopeWriter(codec, compression) + writer = ConnectEnvelopeWriter(codec, compression) for msg in msgs: yield writer.write(msg) diff --git a/src/connectrpc/_envelope.py b/src/connectrpc/_envelope.py index 175b0c5..ea2bf33 100644 --- a/src/connectrpc/_envelope.py +++ b/src/connectrpc/_envelope.py @@ -2,11 +2,12 @@ import json import struct +from abc import ABC, abstractmethod from typing import TYPE_CHECKING, Any, Generic, TypeVar -from ._client_shared import handle_response_trailers from ._compression import Compression, IdentityCompression from ._protocol import ConnectWireError +from ._response_metadata import handle_response_trailers from .code import Code from .errors import ConnectError @@ -95,7 +96,7 @@ def _read_messages(self) -> Iterator[_RES]: self._next_message_length = int.from_bytes(self._buffer[1:5], "big") -class EnvelopeWriter(Generic[_T]): +class EnvelopeWriter(ABC, Generic[_T]): def __init__(self, codec: Codec[_T, Any], compression: Compression | None) -> None: self._codec = codec self._compression = compression @@ -111,16 +112,7 @@ def write(self, message: _T) -> bytes: # I/O multiple times for small prefix / length elements. 
return struct.pack(">BI", self._prefix, len(data)) + data - def end(self, trailers: Headers, error: ConnectWireError | None) -> bytes: - end_message = {} - if trailers: - metadata: dict[str, list[str]] = {} - for key, value in trailers.allitems(): - metadata.setdefault(key, []).append(value) - end_message["metadata"] = metadata - if error: - end_message["error"] = error.to_dict() - data = json.dumps(end_message).encode() - if self._compression: - data = self._compression.compress(data) - return struct.pack(">BI", self._prefix | 0b10, len(data)) + data + @abstractmethod + def end( + self, user_trailers: Headers, error: ConnectWireError | None + ) -> bytes | Headers: ... diff --git a/src/connectrpc/_gen/status_pb2.py b/src/connectrpc/_gen/status_pb2.py new file mode 100644 index 0000000..b62ebee --- /dev/null +++ b/src/connectrpc/_gen/status_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: status.proto +# Protobuf Python Version: 5.26.0 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cstatus.proto\x12\ngoogle.rpc\x1a\x19google/protobuf/any.proto\"f\n\x06Status\x12\x12\n\x04\x63ode\x18\x01 \x01(\x05R\x04\x63ode\x12\x18\n\x07message\x18\x02 \x01(\tR\x07message\x12.\n\x07\x64\x65tails\x18\x03 \x03(\x0b\x32\x14.google.protobuf.AnyR\x07\x64\x65tailsB^\n\x0e\x63om.google.rpcB\x0bStatusProtoP\x01Z7google.golang.org/genproto/googleapis/rpc/status;status\xa2\x02\x03RPCb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'status_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\016com.google.rpcB\013StatusProtoP\001Z7google.golang.org/genproto/googleapis/rpc/status;status\242\002\003RPC' + _globals['_STATUS']._serialized_start=55 + _globals['_STATUS']._serialized_end=157 +# @@protoc_insertion_point(module_scope) diff --git a/src/connectrpc/_gen/status_pb2.pyi b/src/connectrpc/_gen/status_pb2.pyi new file mode 100644 index 0000000..0656223 --- /dev/null +++ b/src/connectrpc/_gen/status_pb2.pyi @@ -0,0 +1,17 @@ +from google.protobuf import any_pb2 as _any_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class Status(_message.Message): + __slots__ = ("code", "message", "details") + CODE_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + DETAILS_FIELD_NUMBER: _ClassVar[int] + code: int + message: str + details: _containers.RepeatedCompositeFieldContainer[_any_pb2.Any] + def __init__(self, code: _Optional[int] = ..., message: _Optional[str] = ..., details: _Optional[_Iterable[_Union[_any_pb2.Any, _Mapping]]] = ...) -> None: ... 
diff --git a/src/connectrpc/_protocol.py b/src/connectrpc/_protocol.py index 0136d35..ef511f7 100644 --- a/src/connectrpc/_protocol.py +++ b/src/connectrpc/_protocol.py @@ -4,10 +4,11 @@ from base64 import b64decode, b64encode from dataclasses import dataclass from http import HTTPStatus -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING, Protocol, TypeVar, cast from google.protobuf.any_pb2 import Any +from ._compression import Compression from .code import Code from .errors import ConnectError @@ -16,17 +17,15 @@ import httpx -CONNECT_HEADER_PROTOCOL_VERSION = "connect-protocol-version" -CONNECT_PROTOCOL_VERSION = "1" -CONNECT_UNARY_CONTENT_TYPE_PREFIX = "application/" -CONNECT_STREAMING_CONTENT_TYPE_PREFIX = "application/connect+" + from ._codec import Codec + from ._compression import Compression + from ._envelope import EnvelopeWriter + from .method import MethodInfo + from .request import Headers, RequestContext -CONNECT_UNARY_HEADER_COMPRESSION = "content-encoding" -CONNECT_UNARY_HEADER_ACCEPT_COMPRESSION = "accept-encoding" -CONNECT_STREAMING_HEADER_COMPRESSION = "connect-content-encoding" -CONNECT_STREAMING_HEADER_ACCEPT_COMPRESSION = "connect-accept-encoding" - -CONNECT_HEADER_TIMEOUT = "connect-timeout-ms" +REQ = TypeVar("REQ") +RES = TypeVar("RES") +T = TypeVar("T") # Define a custom class for HTTP Status to allow adding 499 status code @@ -179,22 +178,45 @@ def to_json_bytes(self) -> bytes: return json.dumps(self.to_dict()).encode("utf-8") +class ServerProtocol(Protocol): + def create_request_context( + self, method: MethodInfo[REQ, RES], http_method: str, headers: Headers + ) -> RequestContext[REQ, RES]: + """Creates a RequestContext from the HTTP method and headers.""" + ... + + def create_envelope_writer( + self, codec: Codec[T, Any], compression: Compression | None + ) -> EnvelopeWriter[T]: + """Creates the EnvelopeWriter to write response messages.""" + ... + + def uses_trailers(self) -> bool: + """Returns whether the protocol uses trailers for status reporting.""" + ... + + def content_type(self, codec: Codec) -> str: + """Returns the content type for the given codec.""" + ... + + def compression_header_name(self) -> str: + """Returns the compression header name and value.""" + ... + + def codec_name_from_content_type(self, content_type: str, *, stream: bool) -> str: + """Extracts the codec name from the content type.""" + ... + + def negotiate_stream_compression( + self, headers: Headers + ) -> tuple[Compression | None, Compression]: + """Negotiates request and response compression based on headers.""" + ... + + class HTTPException(Exception): """An HTTP exception returned directly before starting the connect protocol.""" def __init__(self, status: HTTPStatus, headers: list[tuple[str, str]]) -> None: self.status = status self.headers = headers - - -def codec_name_from_content_type(content_type: str, *, stream: bool) -> str: - prefix = ( - CONNECT_STREAMING_CONTENT_TYPE_PREFIX - if stream - else CONNECT_UNARY_CONTENT_TYPE_PREFIX - ) - if content_type.startswith(prefix): - return content_type[len(prefix) :] - # Follow connect-go behavior for malformed content type. If the content type misses the prefix, - # it will still be coincidentally handled. 
- return content_type diff --git a/src/connectrpc/_protocol_connect.py b/src/connectrpc/_protocol_connect.py new file mode 100644 index 0000000..e6cf01d --- /dev/null +++ b/src/connectrpc/_protocol_connect.py @@ -0,0 +1,136 @@ +from __future__ import annotations + +import json +import struct +from http import HTTPStatus +from typing import TYPE_CHECKING, Any, TypeVar + +from ._compression import IdentityCompression, get_compression, negotiate_compression +from ._envelope import EnvelopeWriter +from ._protocol import ConnectWireError, HTTPException +from .code import Code +from .errors import ConnectError +from .method import IdempotencyLevel, MethodInfo +from .request import Headers, RequestContext + +if TYPE_CHECKING: + from ._codec import Codec + from ._compression import Compression + +REQ = TypeVar("REQ") +RES = TypeVar("RES") + +CONNECT_HEADER_PROTOCOL_VERSION = "connect-protocol-version" +CONNECT_PROTOCOL_VERSION = "1" +CONNECT_HEADER_TIMEOUT = "connect-timeout-ms" +CONNECT_UNARY_CONTENT_TYPE_PREFIX = "application/" +CONNECT_STREAMING_CONTENT_TYPE_PREFIX = "application/connect+" +CONNECT_UNARY_HEADER_COMPRESSION = "content-encoding" +CONNECT_UNARY_HEADER_ACCEPT_COMPRESSION = "accept-encoding" +CONNECT_STREAMING_HEADER_COMPRESSION = "connect-content-encoding" +CONNECT_STREAMING_HEADER_ACCEPT_COMPRESSION = "connect-accept-encoding" + + +def codec_name_from_content_type(content_type: str, *, stream: bool) -> str: + prefix = ( + CONNECT_STREAMING_CONTENT_TYPE_PREFIX + if stream + else CONNECT_UNARY_CONTENT_TYPE_PREFIX + ) + if content_type.startswith(prefix): + return content_type[len(prefix) :] + # Follow connect-go behavior for malformed content type. If the content type misses the prefix, + # it will still be coincidentally handled. + return content_type + + +class ConnectServerProtocol: + def create_request_context( + self, method: MethodInfo[REQ, RES], http_method: str, headers: Headers + ) -> RequestContext[REQ, RES]: + if method.idempotency_level == IdempotencyLevel.NO_SIDE_EFFECTS: + if http_method not in ("GET", "POST"): + raise HTTPException( + HTTPStatus.METHOD_NOT_ALLOWED, [("allow", "GET, POST")] + ) + elif http_method != "POST": + raise HTTPException(HTTPStatus.METHOD_NOT_ALLOWED, [("allow", "POST")]) + + # We don't require connect-protocol-version header. connect-go provides an option + # to require it but it's almost never used in practice. 
+ connect_protocol_version = headers.get( + CONNECT_HEADER_PROTOCOL_VERSION, CONNECT_PROTOCOL_VERSION + ) + if connect_protocol_version != CONNECT_PROTOCOL_VERSION: + raise ConnectError( + Code.INVALID_ARGUMENT, + f"connect-protocol-version must be '1': got '{connect_protocol_version}'", + ) + + timeout_header = headers.get(CONNECT_HEADER_TIMEOUT) + if timeout_header: + if len(timeout_header) > 10: + raise ConnectError( + Code.INVALID_ARGUMENT, + f"Invalid timeout header: '{timeout_header} has >10 digits", + ) + try: + timeout_ms = int(timeout_header) + except ValueError as e: + raise ConnectError( + Code.INVALID_ARGUMENT, f"Invalid timeout header: '{timeout_header}'" + ) from e + else: + timeout_ms = None + return RequestContext( + method=method, + http_method=http_method, + request_headers=headers, + timeout_ms=timeout_ms, + ) + + def create_envelope_writer( + self, codec: Codec[RES, Any], compression: Compression | None + ) -> EnvelopeWriter[RES]: + return ConnectEnvelopeWriter(codec, compression) + + def uses_trailers(self) -> bool: + return False + + def content_type(self, codec: Codec) -> str: + return f"{CONNECT_STREAMING_CONTENT_TYPE_PREFIX}{codec.name()}" + + def compression_header_name(self) -> str: + return CONNECT_STREAMING_HEADER_COMPRESSION + + def codec_name_from_content_type(self, content_type: str, *, stream: bool) -> str: + return codec_name_from_content_type(content_type, stream=stream) + + def negotiate_stream_compression( + self, headers: Headers + ) -> tuple[Compression, Compression]: + req_compression_name = headers.get( + CONNECT_STREAMING_HEADER_COMPRESSION, "identity" + ) + req_compression = get_compression(req_compression_name) or IdentityCompression() + accept_compression = headers.get( + CONNECT_STREAMING_HEADER_ACCEPT_COMPRESSION, "" + ) + resp_compression = negotiate_compression(accept_compression) + return req_compression, resp_compression + + +class ConnectEnvelopeWriter(EnvelopeWriter): + def end(self, user_trailers: Headers, error: ConnectWireError | None) -> bytes: + end_message = {} + if user_trailers: + metadata: dict[str, list[str]] = {} + for key, value in user_trailers.allitems(): + metadata.setdefault(key, []).append(value) + end_message["metadata"] = metadata + if error: + end_message["error"] = error.to_dict() + data = json.dumps(end_message).encode() + if self._compression: + data = self._compression.compress(data) + return struct.pack(">BI", self._prefix | 0b10, len(data)) + data diff --git a/src/connectrpc/_protocol_grpc.py b/src/connectrpc/_protocol_grpc.py new file mode 100644 index 0000000..ad8656a --- /dev/null +++ b/src/connectrpc/_protocol_grpc.py @@ -0,0 +1,153 @@ +from __future__ import annotations + +import urllib.parse +from base64 import b64encode +from http import HTTPStatus +from typing import TYPE_CHECKING, Any, TypeVar + +from ._compression import get_compression, negotiate_compression +from ._envelope import EnvelopeWriter +from ._gen.status_pb2 import Status +from ._protocol import ConnectWireError, HTTPException +from .code import Code +from .request import Headers, RequestContext + +if TYPE_CHECKING: + from ._codec import Codec + from ._compression import Compression + from .method import MethodInfo + +REQ = TypeVar("REQ") +RES = TypeVar("RES") + +GRPC_CONTENT_TYPE_DEFAULT = "application/grpc" +GRPC_CONTENT_TYPE_PREFIX = f"{GRPC_CONTENT_TYPE_DEFAULT}+" + +GRPC_HEADER_TIMEOUT = "grpc-timeout" +GRPC_HEADER_COMPRESSION = "grpc-encoding" +GRPC_HEADER_ACCEPT_COMPRESSION = "grpc-accept-encoding" + + +class GRPCServerProtocol: + 
def create_request_context( + self, method: MethodInfo[REQ, RES], http_method: str, headers: Headers + ) -> RequestContext[REQ, RES]: + if http_method != "POST": + raise HTTPException(HTTPStatus.METHOD_NOT_ALLOWED, [("allow", "POST")]) + + timeout_header = headers.get(GRPC_HEADER_TIMEOUT) + timeout_ms = _parse_timeout(timeout_header) if timeout_header else None + + return RequestContext( + method=method, + http_method=http_method, + request_headers=headers, + timeout_ms=timeout_ms, + ) + + def create_envelope_writer( + self, codec: Codec[RES, Any], compression: Compression | None + ) -> EnvelopeWriter[RES]: + return GRPCEnvelopeWriter(codec, compression) + + def uses_trailers(self) -> bool: + return True + + def content_type(self, codec: Codec) -> str: + return f"{GRPC_CONTENT_TYPE_PREFIX}{codec.name()}" + + def compression_header_name(self) -> str: + return GRPC_HEADER_COMPRESSION + + def codec_name_from_content_type(self, content_type: str, *, stream: bool) -> str: + if content_type.startswith(GRPC_CONTENT_TYPE_PREFIX): + return content_type[len(GRPC_CONTENT_TYPE_PREFIX) :] + return "proto" + + def negotiate_stream_compression( + self, headers: Headers + ) -> tuple[Compression | None, Compression]: + req_compression_name = headers.get(GRPC_HEADER_COMPRESSION, "identity") + req_compression = get_compression(req_compression_name) + accept_compression = headers.get(GRPC_HEADER_ACCEPT_COMPRESSION, "") + resp_compression = negotiate_compression(accept_compression) + return req_compression, resp_compression + + +def _parse_timeout(timeout: str) -> int: + # We normalize to int milliseconds matching connect's timeout header. + value_to_ms = _lookup_timeout_unit(timeout[-1]) + try: + value = int(timeout[:-1]) + except ValueError as e: + msg = f"protocol error: invalid timeout '{timeout}'" + raise ValueError(msg) from e + + # timeout must be ASCII string of at most 8 digits + if value > 99999999: + msg = f"protocol error: timeout '{timeout}' is too long" + raise ValueError(msg) + + return int(value * value_to_ms) + + +def _lookup_timeout_unit(unit: str) -> float: + match unit: + case "H": + return 60 * 60 * 1000 + case "M": + return 60 * 1000 + case "S": + return 1 * 1000 + case "m": + return 1 + case "u": + return 1 / 1000 + case "n": + return 1 / 1000 / 1000 + case _: + msg = f"protocol error: timeout has invalid unit '{unit}'" + raise ValueError(msg) + + +class GRPCEnvelopeWriter(EnvelopeWriter): + def end(self, user_trailers: Headers, error: ConnectWireError | None) -> Headers: + trailers = Headers(list(user_trailers.allitems())) + if error: + status = _connect_status_to_grpc[error.code] + trailers["grpc-status"] = status + message = error.message + if message: + message = urllib.parse.quote(message, safe="") + trailers["grpc-message"] = message + if error.details: + grpc_status = Status( + code=int(status), message=error.message, details=error.details + ) + grpc_status_bin = ( + b64encode(grpc_status.SerializeToString()).decode().rstrip("=") + ) + trailers["grpc-status-details-bin"] = grpc_status_bin + else: + trailers["grpc-status"] = "0" + return trailers + + +_connect_status_to_grpc = { + Code.CANCELED: "1", + Code.UNKNOWN: "2", + Code.INVALID_ARGUMENT: "3", + Code.DEADLINE_EXCEEDED: "4", + Code.NOT_FOUND: "5", + Code.ALREADY_EXISTS: "6", + Code.PERMISSION_DENIED: "7", + Code.RESOURCE_EXHAUSTED: "8", + Code.FAILED_PRECONDITION: "9", + Code.ABORTED: "10", + Code.OUT_OF_RANGE: "11", + Code.UNIMPLEMENTED: "12", + Code.INTERNAL: "13", + Code.UNAVAILABLE: "14", + Code.DATA_LOSS: "15", + 
Code.UNAUTHENTICATED: "16", +} diff --git a/src/connectrpc/_protocol_server.py b/src/connectrpc/_protocol_server.py new file mode 100644 index 0000000..3cc04b4 --- /dev/null +++ b/src/connectrpc/_protocol_server.py @@ -0,0 +1,21 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from ._protocol_connect import ConnectServerProtocol +from ._protocol_grpc import ( + GRPC_CONTENT_TYPE_DEFAULT, + GRPC_CONTENT_TYPE_PREFIX, + GRPCServerProtocol, +) + +if TYPE_CHECKING: + from ._protocol import ServerProtocol + + +def negotiate_server_protocol(content_type: str) -> ServerProtocol: + if content_type == GRPC_CONTENT_TYPE_DEFAULT or content_type.startswith( + GRPC_CONTENT_TYPE_PREFIX + ): + return GRPCServerProtocol() + return ConnectServerProtocol() diff --git a/src/connectrpc/_response_metadata.py b/src/connectrpc/_response_metadata.py new file mode 100644 index 0000000..a08e2b8 --- /dev/null +++ b/src/connectrpc/_response_metadata.py @@ -0,0 +1,102 @@ +from __future__ import annotations + +import contextlib +from contextvars import ContextVar, Token +from typing import TYPE_CHECKING + +from .request import Headers + +if TYPE_CHECKING: + from collections.abc import Mapping, Sequence + from types import TracebackType + + from httpx import Headers as HttpxHeaders + + +_current_response = ContextVar["ResponseMetadata"]("connectrpc_current_response") + + +def handle_response_headers(headers: HttpxHeaders) -> None: + response = _current_response.get(None) + if not response: + return + + response_headers: Headers = Headers() + response_trailers: Headers = Headers() + for key, value in headers.multi_items(): + if key.startswith("trailer-"): + normalized_key = key[len("trailer-") :] + obj = response_trailers + else: + normalized_key = key + obj = response_headers + obj.add(normalized_key, value) + if response_headers: + response._headers = response_headers # noqa: SLF001 + if response_trailers: + response._trailers = response_trailers # noqa: SLF001 + + +def handle_response_trailers(trailers: Mapping[str, Sequence[str]]) -> None: + response = _current_response.get(None) + if not response: + return + response_trailers = response.trailers() + for key, values in trailers.items(): + for value in values: + response_trailers.add(key, value) + if response_trailers: + response._trailers = response_trailers # noqa: SLF001 + + +class ResponseMetadata: + """ + Response metadata separate from the message payload. + + Commonly, RPC client invocations only need the message payload and do not need to + directly read other data such as headers or trailers. In cases where they are needed, + initialize this class in a context manager to access the response headers and trailers + for the invocation made within the context. 
+ + Example: + + with ResponseMetadata() as resp_data: + resp = client.MakeHat(Size(inches=10)) + do_something_with_response_payload(resp) + check_response_headers(resp_data.headers()) + check_response_trailers(resp_data.trailers()) + """ + + _headers: Headers | None = None + _trailers: Headers | None = None + _token: Token[ResponseMetadata] | None = None + + def __enter__(self) -> ResponseMetadata: + self._token = _current_response.set(self) + return self + + def __exit__( + self, + _exc_type: type[BaseException] | None, + _exc_value: BaseException | None, + _traceback: TracebackType | None, + ) -> None: + if self._token: + # Normal usage with context manager will always work but it is + # theoretically possible for user to move to another thread + # and this fails, it is fine to ignore it. + with contextlib.suppress(Exception): + _current_response.reset(self._token) + self._token = None + + def headers(self) -> Headers: + """Returns the response headers.""" + if self._headers is None: + return Headers() + return self._headers + + def trailers(self) -> Headers: + """Returns the response trailers.""" + if self._trailers is None: + return Headers() + return self._trailers diff --git a/src/connectrpc/_server_async.py b/src/connectrpc/_server_async.py index 1390951..490cf48 100644 --- a/src/connectrpc/_server_async.py +++ b/src/connectrpc/_server_async.py @@ -12,7 +12,7 @@ from . import _compression, _server_shared from ._codec import Codec, get_codec -from ._envelope import EnvelopeReader, EnvelopeWriter +from ._envelope import EnvelopeReader from ._interceptor_async import ( BidiStreamInterceptor, ClientStreamInterceptor, @@ -21,15 +21,9 @@ UnaryInterceptor, resolve_interceptors, ) -from ._protocol import ( - CONNECT_STREAMING_CONTENT_TYPE_PREFIX, - CONNECT_STREAMING_HEADER_ACCEPT_COMPRESSION, - CONNECT_STREAMING_HEADER_COMPRESSION, - CONNECT_UNARY_CONTENT_TYPE_PREFIX, - ConnectWireError, - HTTPException, - codec_name_from_content_type, -) +from ._protocol import ConnectWireError, HTTPException, ServerProtocol +from ._protocol_connect import CONNECT_UNARY_CONTENT_TYPE_PREFIX, ConnectServerProtocol +from ._protocol_server import negotiate_server_protocol from ._server_shared import ( EndpointBidiStream, EndpointClientStream, @@ -172,9 +166,15 @@ async def __call__( http_method = scope["method"] headers = _process_headers(scope.get("headers", ())) - ctx = _server_shared.create_request_context( - endpoint.method, http_method, headers - ) + content_type = headers.get("content-type", "") + protocol = negotiate_server_protocol(content_type) + if protocol.uses_trailers() and "http.response.trailers" not in cast( + "dict", scope.get("extensions", {}) + ): + msg = f"ASGI server does not support ASGI trailers extension but protocol for content-type '{content_type}' requires trailers" + raise RuntimeError(msg) + + ctx = protocol.create_request_context(endpoint.method, http_method, headers) is_unary = isinstance(endpoint, EndpointUnary) @@ -184,7 +184,7 @@ async def __call__( codec_name = query_params.get("encoding", ("",))[0] else: query_params = _UNSET_QUERY_PARAMS - codec_name = codec_name_from_content_type( + codec_name = protocol.codec_name_from_content_type( headers.get("content-type", ""), stream=not is_unary ) codec = get_codec(codec_name.lower()) @@ -194,8 +194,8 @@ async def __call__( [("Accept-Post", "application/json, application/proto")], ) - if is_unary: - return await self._handle_unary( + if is_unary and isinstance(protocol, ConnectServerProtocol): + return await 
self._handle_unary_connect( http_method, headers, codec, @@ -209,9 +209,11 @@ async def __call__( return await self._handle_error(e, ctx, send) # Streams have their own error handling so move out of the try block. - return await self._handle_stream(receive, send, endpoint, codec, headers, ctx) + return await self._handle_stream( + receive, send, protocol, endpoint, codec, headers, ctx + ) - async def _handle_unary( + async def _handle_unary_connect( self, http_method: str, headers: Headers, @@ -338,30 +340,25 @@ async def _handle_stream( self, receive: ASGIReceiveCallable, send: ASGISendCallable, - endpoint: EndpointBidiStream[_REQ, _RES] - | EndpointClientStream[_REQ, _RES] - | EndpointServerStream[_REQ, _RES], + protocol: ServerProtocol, + endpoint: Endpoint[_REQ, _RES], codec: Codec, headers: Headers, ctx: _server_shared.RequestContext, ) -> None: - req_compression_name = headers.get( - CONNECT_STREAMING_HEADER_COMPRESSION, "identity" + req_compression, resp_compression = protocol.negotiate_stream_compression( + headers ) - req_compression = ( - _compression.get_compression(req_compression_name) - or _compression.IdentityCompression() - ) - accept_compression = headers.get( - CONNECT_STREAMING_HEADER_ACCEPT_COMPRESSION, "" - ) - response_compression = _compression.negotiate_compression(accept_compression) - writer = EnvelopeWriter(codec, response_compression) + writer = protocol.create_envelope_writer(codec, resp_compression) error: Exception | None = None sent_headers = False try: + if not req_compression: + raise ConnectError( + Code.UNIMPLEMENTED, "Unrecognized request compression" + ) request_stream = _request_stream( receive, endpoint.method.input, @@ -371,6 +368,10 @@ async def _handle_stream( ) match endpoint: + case EndpointUnary(): + request = await _consume_single_request(request_stream) + response = await endpoint.function(request, ctx) + response_stream = _yield_single_response(response) case EndpointClientStream(): response = await endpoint.function(request_stream, ctx) response_stream = _yield_single_response(response) @@ -385,7 +386,7 @@ async def _handle_stream( # response headers. 
if not sent_headers: await _send_stream_response_headers( - send, codec, response_compression.name(), ctx + send, protocol, codec, resp_compression.name(), ctx ) sent_headers = True @@ -398,21 +399,36 @@ async def _handle_stream( except Exception as e: error = e finally: + end_message = writer.end( + ctx.response_trailers(), + ConnectWireError.from_exception(error) if error else None, + ) if not sent_headers: # Exception before any response message is returned await _send_stream_response_headers( - send, codec, response_compression.name(), ctx + send, protocol, codec, resp_compression.name(), ctx + ) + if isinstance(end_message, bytes): + await send( + { + "type": "http.response.body", + "body": end_message, + "more_body": False, + } + ) + else: + await send( + {"type": "http.response.body", "body": b"", "more_body": False} + ) + await send( + { + "type": "http.response.trailers", + "headers": [ + (k.encode(), v.encode()) for k, v in end_message.allitems() + ], + "more_trailers": False, + } ) - await send( - { - "type": "http.response.body", - "body": writer.end( - ctx.response_trailers(), - ConnectWireError.from_exception(error) if error else None, - ), - "more_body": False, - } - ) async def _handle_error( self, exc: Exception, ctx: RequestContext | None, send: ASGISendCallable @@ -457,14 +473,15 @@ def _resolve_endpoints(self, service: _SVC) -> Mapping[str, Endpoint]: async def _send_stream_response_headers( - send: ASGISendCallable, codec: Codec, compression_name: str, ctx: RequestContext + send: ASGISendCallable, + protocol: ServerProtocol, + codec: Codec, + compression_name: str, + ctx: RequestContext, ) -> None: response_headers = [ - ( - b"content-type", - f"{CONNECT_STREAMING_CONTENT_TYPE_PREFIX}{codec.name()}".encode(), - ), - (CONNECT_STREAMING_HEADER_COMPRESSION.encode(), compression_name.encode()), + (b"content-type", protocol.content_type(codec).encode()), + (protocol.compression_header_name().encode(), compression_name.encode()), ] response_headers.extend( (key.encode(), value.encode()) @@ -475,7 +492,7 @@ async def _send_stream_response_headers( "type": "http.response.start", "status": 200, "headers": response_headers, - "trailers": False, + "trailers": protocol.uses_trailers(), } ) diff --git a/src/connectrpc/_server_shared.py b/src/connectrpc/_server_shared.py index edf5504..18f8f83 100644 --- a/src/connectrpc/_server_shared.py +++ b/src/connectrpc/_server_shared.py @@ -1,23 +1,14 @@ from __future__ import annotations from dataclasses import dataclass -from http import HTTPStatus from typing import TYPE_CHECKING, Generic, TypeVar -from ._protocol import ( - CONNECT_HEADER_PROTOCOL_VERSION, - CONNECT_HEADER_TIMEOUT, - CONNECT_PROTOCOL_VERSION, - HTTPException, -) -from .code import Code -from .errors import ConnectError -from .method import IdempotencyLevel, MethodInfo -from .request import Headers, RequestContext - if TYPE_CHECKING: from collections.abc import AsyncIterator, Awaitable, Callable, Iterator + from .method import MethodInfo + from .request import RequestContext + REQ = TypeVar("REQ") RES = TypeVar("RES") T = TypeVar("T") @@ -145,46 +136,3 @@ class EndpointServerStreamSync(EndpointSync[REQ, RES]): @dataclass(kw_only=True, frozen=True, slots=True) class EndpointBidiStreamSync(EndpointSync[REQ, RES]): function: Callable[[Iterator[REQ], RequestContext[REQ, RES]], Iterator[RES]] - - -def create_request_context( - method: MethodInfo[REQ, RES], http_method: str, headers: Headers -) -> RequestContext[REQ, RES]: - if method.idempotency_level == 
IdempotencyLevel.NO_SIDE_EFFECTS: - if http_method not in ("GET", "POST"): - raise HTTPException(HTTPStatus.METHOD_NOT_ALLOWED, [("allow", "GET, POST")]) - elif http_method != "POST": - raise HTTPException(HTTPStatus.METHOD_NOT_ALLOWED, [("allow", "POST")]) - - # We don't require connect-protocol-version header. connect-go provides an option - # to require it but it's almost never used in practice. - connect_protocol_version = headers.get( - CONNECT_HEADER_PROTOCOL_VERSION, CONNECT_PROTOCOL_VERSION - ) - if connect_protocol_version != CONNECT_PROTOCOL_VERSION: - raise ConnectError( - Code.INVALID_ARGUMENT, - f"connect-protocol-version must be '1': got '{connect_protocol_version}'", - ) - - timeout_header = headers.get(CONNECT_HEADER_TIMEOUT) - if timeout_header: - if len(timeout_header) > 10: - raise ConnectError( - Code.INVALID_ARGUMENT, - f"Invalid timeout header: '{timeout_header} has >10 digits", - ) - try: - timeout_ms = int(timeout_header) - except ValueError as e: - raise ConnectError( - Code.INVALID_ARGUMENT, f"Invalid timeout header: '{timeout_header}'" - ) from e - else: - timeout_ms = None - return RequestContext( - method=method, - http_method=http_method, - request_headers=headers, - timeout_ms=timeout_ms, - ) diff --git a/src/connectrpc/_server_sync.py b/src/connectrpc/_server_sync.py index 9beba1a..d3f2b24 100644 --- a/src/connectrpc/_server_sync.py +++ b/src/connectrpc/_server_sync.py @@ -20,15 +20,13 @@ ServerStreamInterceptorSync, UnaryInterceptorSync, ) -from ._protocol import ( - CONNECT_STREAMING_CONTENT_TYPE_PREFIX, - CONNECT_STREAMING_HEADER_ACCEPT_COMPRESSION, - CONNECT_STREAMING_HEADER_COMPRESSION, +from ._protocol import ConnectWireError, HTTPException, ServerProtocol +from ._protocol_connect import ( CONNECT_UNARY_CONTENT_TYPE_PREFIX, - ConnectWireError, - HTTPException, + ConnectServerProtocol, codec_name_from_content_type, ) +from ._protocol_server import negotiate_server_protocol from ._server_shared import ( EndpointBidiStreamSync, EndpointClientStreamSync, @@ -41,7 +39,7 @@ if TYPE_CHECKING: import sys - from collections.abc import Iterable, Iterator, Mapping, Sequence + from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence from io import BytesIO if sys.version_info >= (3, 11): @@ -204,23 +202,26 @@ def __call__( http_method = environ["REQUEST_METHOD"] headers = _process_headers(_normalize_wsgi_headers(environ)) - ctx = _server_shared.create_request_context( - endpoint.method, http_method, headers - ) + content_type = headers.get("content-type", "") + protocol = negotiate_server_protocol(content_type) + send_trailers: Callable[[list[tuple[str, str]]], None] | None = None - match endpoint: - case EndpointUnarySync(): - return self._handle_unary( - environ, start_response, http_method, endpoint, ctx, headers - ) - case ( - EndpointClientStreamSync() - | EndpointServerStreamSync() - | EndpointBidiStreamSync() - ): - return self._handle_stream( - environ, start_response, headers, endpoint, ctx - ) + if protocol.uses_trailers(): + send_trailers = environ.get("wsgi.ext.http.send_trailers") + if not send_trailers: + msg = f"WSGI server does not support WSGI trailers extension but protocol for content-type '{content_type}' requires trailers" + raise RuntimeError(msg) + ctx = protocol.create_request_context(endpoint.method, http_method, headers) + + if isinstance(endpoint, EndpointUnarySync) and isinstance( + protocol, ConnectServerProtocol + ): + return self._handle_unary( + environ, start_response, http_method, endpoint, ctx, headers + ) + 
return self._handle_stream( + environ, start_response, send_trailers, protocol, headers, endpoint, ctx + ) except Exception as e: return self._handle_error(e, ctx, start_response) @@ -394,18 +395,17 @@ def _handle_stream( self, environ: WSGIEnvironment, start_response: StartResponse, + send_trailers: Callable[[list[tuple[str, str]]], None] | None, + protocol: ServerProtocol, headers: Headers, - endpoint: EndpointClientStreamSync[_REQ, _RES] - | EndpointServerStreamSync[_REQ, _RES] - | EndpointBidiStreamSync[_REQ, _RES], + endpoint: EndpointSync[_REQ, _RES], ctx: RequestContext[_REQ, _RES], ) -> Iterable[bytes]: - accept_compression = headers.get( - CONNECT_STREAMING_HEADER_ACCEPT_COMPRESSION, "" + req_compression, resp_compression = protocol.negotiate_stream_compression( + headers ) - response_compression = _compression.negotiate_compression(accept_compression) - codec_name = codec_name_from_content_type( + codec_name = protocol.codec_name_from_content_type( headers.get("content-type", ""), stream=True ) codec = get_codec(codec_name) @@ -419,19 +419,24 @@ def _handle_stream( ) ], ) - req_compression_name = headers.get( - CONNECT_STREAMING_HEADER_COMPRESSION, "identity" - ) - req_compression = ( - _compression.get_compression(req_compression_name) - or _compression.IdentityCompression() - ) - request_stream = _request_stream( - environ, endpoint.method.input, codec, req_compression, self._read_max_bytes - ) - writer = EnvelopeWriter(codec, response_compression) + writer = protocol.create_envelope_writer(codec, resp_compression) try: + if not req_compression: + raise ConnectError( + Code.UNIMPLEMENTED, "Unrecognized request compression" + ) + request_stream = _request_stream( + environ, + endpoint.method.input, + codec, + req_compression, + self._read_max_bytes, + ) match endpoint: + case _server_shared.EndpointUnarySync(): + request = _consume_single_request(request_stream) + response = endpoint.function(request, ctx) + response_stream = iter([response]) case _server_shared.EndpointClientStreamSync(): response = endpoint.function(request_stream, ctx) response_stream = iter([response]) @@ -446,28 +451,39 @@ def _handle_stream( # Response headers set before the first message should be set to the context and # we can send them. _send_stream_response_headers( - start_response, codec, response_compression.name(), ctx + start_response, protocol, codec, resp_compression.name(), ctx ) if first_response is None: # It's valid for a service method to return no messages, finish the response # without error. - return [writer.end(ctx.response_trailers(), None)] + return [ + _end_response( + writer.end(ctx.response_trailers(), None), send_trailers + ) + ] # WSGI requires start_response to be called before returning the body iterator. # This means we cannot call yield in this function since the function would not # run at all until the iterator is consumed, meaning start_response wouldn't have # been called in time. So we return the response stream as a separate generator # function. This means some duplication of error handling. - return _response_stream(first_response, response_stream, writer, ctx) + return _response_stream( + first_response, response_stream, writer, send_trailers, ctx + ) except Exception as e: # Exception before any response message was returned. An error after the first # response message will be handled by _response_stream, so here we have a # full error-only response. 
             _send_stream_response_headers(
-                start_response, codec, response_compression.name(), ctx
+                start_response, protocol, codec, resp_compression.name(), ctx
             )
             return [
-                writer.end(ctx.response_trailers(), ConnectWireError.from_exception(e))
+                _end_response(
+                    writer.end(
+                        ctx.response_trailers(), ConnectWireError.from_exception(e)
+                    ),
+                    send_trailers,
+                )
             ]
 
     def _handle_error(
@@ -495,6 +511,17 @@ def _handle_error(
         return body
 
 
+def _end_response(
+    message: bytes | Headers,
+    send_trailers: Callable[[list[tuple[str, str]]], None] | None,
+) -> bytes:
+    if isinstance(message, bytes):
+        return message
+    assert send_trailers is not None  # noqa: S101
+    send_trailers(list(message.allitems()))
+    return b""
+
+
 def _add_context_headers(headers: list[tuple[str, str]], ctx: RequestContext) -> None:
     headers.extend((key, value) for key, value in ctx.response_headers().allitems())
     headers.extend(
@@ -504,13 +531,14 @@ def _add_context_headers(headers: list[tuple[str, str]], ctx: RequestContext) ->
 
 def _send_stream_response_headers(
     start_response: StartResponse,
+    protocol: ServerProtocol,
     codec: Codec,
     compression_name: str,
     ctx: RequestContext,
 ) -> None:
     response_headers = [
-        ("content-type", f"{CONNECT_STREAMING_CONTENT_TYPE_PREFIX}{codec.name()}"),
-        (CONNECT_STREAMING_HEADER_COMPRESSION, compression_name),
+        ("content-type", protocol.content_type(codec)),
+        (protocol.compression_header_name(), compression_name),
     ]
     response_headers.extend(
         (key, value) for key, value in ctx.response_headers().allitems()
@@ -534,6 +562,7 @@ def _response_stream(
     first_response: _RES,
     response_stream: Iterator[_RES],
     writer: EnvelopeWriter,
+    send_trailers: Callable[[list[tuple[str, str]]], None] | None,
     ctx: RequestContext,
 ) -> Iterable[bytes]:
     error: Exception | None = None
@@ -546,9 +575,12 @@ def _response_stream(
     except Exception as e:
         error = e
     finally:
-        yield writer.end(
-            ctx.response_trailers(),
-            ConnectWireError.from_exception(error) if error else None,
+        yield _end_response(
+            writer.end(
+                ctx.response_trailers(),
+                ConnectWireError.from_exception(error) if error else None,
+            ),
+            send_trailers,
         )
 
 
diff --git a/src/connectrpc/client.py b/src/connectrpc/client.py
index a727b63..44f8ae7 100644
--- a/src/connectrpc/client.py
+++ b/src/connectrpc/client.py
@@ -4,5 +4,5 @@
 from ._client_async import ConnectClient
-from ._client_shared import ResponseMetadata
 from ._client_sync import ConnectClientSync
+from ._response_metadata import ResponseMetadata
diff --git a/test/test_grpc.py b/test/test_grpc.py
new file mode 100644
index 0000000..54e7b0e
--- /dev/null
+++ b/test/test_grpc.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+import grpc
+import pytest
+import pytest_asyncio
+from example.eliza_pb2 import SayRequest, SayResponse
+from example.eliza_pb2_grpc import ElizaServiceStub
+from pyvoy import Interface, PyvoyServer
+
+from connectrpc._protocol_grpc import _parse_timeout
+
+
+@pytest_asyncio.fixture(scope="module")
+async def url_asgi():
+    async with PyvoyServer("example.eliza_service") as server:
+        yield f"localhost:{server.listener_port}"
+
+
+@pytest_asyncio.fixture(scope="module")
+async def url_wsgi():
+    async with PyvoyServer("example.eliza_service_sync", interface="wsgi") as server:
+        yield f"localhost:{server.listener_port}"
+
+
+@pytest.fixture(params=["asgi", "wsgi"])
+def interface(request: pytest.FixtureRequest) -> Interface:
+    return request.param
+
+
+@pytest.fixture
+def url(interface: Interface, url_asgi: str, url_wsgi: str) -> str:
+    match interface:
+        case "asgi":
+            return url_asgi
+        case "wsgi":
+            return url_wsgi
+
+
+@pytest.mark.asyncio
+async def test_grpc_unary(url: str) -> None:
+    async with grpc.aio.insecure_channel(url) as channel:
+        client = ElizaServiceStub(channel)
+        response: SayResponse = await client.Say(SayRequest(sentence="Hello"))
+        assert len(response.sentence) > 0
+
+
+def test_parse_timeout() -> None:
+    assert _parse_timeout("1H") == 3600 * 1000
+    assert _parse_timeout("2M") == 2 * 60 * 1000
+    assert _parse_timeout("3S") == 3 * 1000
+    assert _parse_timeout("4m") == 4
+    # We parse gRPC timeouts with connect conventions, which means integer milliseconds
+    # The below parse to 0ms.
+    assert _parse_timeout("5u") == 0
+    assert _parse_timeout("6n") == 0
+    with pytest.raises(ValueError, match="protocol error") as excinfo:
+        _parse_timeout("100X")
+    assert "protocol error: timeout has invalid unit 'X'" in str(excinfo.value)
diff --git a/uv.lock b/uv.lock
index 46236ce..9a58018 100644
--- a/uv.lock
+++ b/uv.lock
@@ -458,6 +458,7 @@ dev = [
     { name = "connect-python-example" },
     { name = "daphne" },
     { name = "granian" },
+    { name = "grpcio-tools" },
     { name = "gunicorn", extra = ["gevent"] },
     { name = "httpx", extra = ["http2"] },
     { name = "hypercorn" },
@@ -470,7 +471,7 @@ dev = [
     { name = "pytest-asyncio" },
     { name = "pytest-cov" },
     { name = "pytest-timeout" },
-    { name = "pyvoy", marker = "sys_platform != 'win32'" },
+    { name = "pyvoy" },
     { name = "ruff" },
     { name = "twisted", extra = ["http2", "tls"] },
     { name = "typing-extensions" },
@@ -492,6 +493,7 @@ dev = [
     { name = "connect-python-example", editable = "example" },
     { name = "daphne", specifier = "==4.2.1" },
     { name = "granian", specifier = "==2.5.7" },
+    { name = "grpcio-tools", specifier = "==1.76.0" },
     { name = "gunicorn", extras = ["gevent"], specifier = "==23.0.0" },
     { name = "httpx", extras = ["http2"], specifier = "==0.28.1" },
     { name = "hypercorn", specifier = "==0.17.3" },
@@ -504,7 +506,7 @@ dev = [
     { name = "pytest-asyncio" },
     { name = "pytest-cov" },
     { name = "pytest-timeout", specifier = "==2.4.0" },
-    { name = "pyvoy", marker = "sys_platform != 'win32'", specifier = "==0.1.2" },
+    { name = "pyvoy", specifier = "==0.2.0" },
     { name = "ruff", specifier = "~=0.13.2" },
     { name = "twisted", extras = ["tls", "http2"], specifier = "==25.5.0" },
     { name = "typing-extensions", specifier = "==4.15.0" },
@@ -731,6 +733,17 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/01/34/6171ab34715ed210bcd6c2b38839cc792993cff4fe2493f50bc92b0086a0/daphne-4.2.1-py3-none-any.whl", hash = "sha256:881e96b387b95b35ad85acd855f229d7f5b79073d6649089c8a33f661885e055", size = 29015, upload-time = "2025-07-02T12:57:03.793Z" },
 ]
 
+[[package]]
+name = "envoy-server"
+version = "1.36.4"
+source = { registry = "https://pypi.org/simple" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/5f/87/7f15cea22d2f41f44306078ccbdf814c63900ac381283820644ce817f698/envoy_server-1.36.4-py3-none-macosx_15_0_arm64.whl", hash = "sha256:c2db4ce7214ac9a32e45127c419f4db30a0ab0039885ba4e111d581e6edff9aa", size = 31980431, upload-time = "2025-12-14T14:27:24.746Z" },
+    { url = "https://files.pythonhosted.org/packages/97/7c/541016a73f4d6329a5af9390b956adfebbf97e0023d3b96eeed2803fcd59/envoy_server-1.36.4-py3-none-manylinux_2_31_aarch64.whl", hash = "sha256:05b06a19c4f6499c752f1a5a034a02752f0cdbd0422c3d8eb515237a36e82cd2", size = 27888994, upload-time = "2025-12-14T14:27:27.318Z" },
+    { url =
"https://files.pythonhosted.org/packages/b3/e0/875225f5bf2ef9da9de0de1936969205b7d873823d7cdefa4bad2c0e413a/envoy_server-1.36.4-py3-none-manylinux_2_31_x86_64.whl", hash = "sha256:86dfc14ae7fad32de3b3ace54fbd400187c3b4fd6731781ff5bc633fa133f3f9", size = 29512446, upload-time = "2025-12-14T14:27:30.462Z" }, + { url = "https://files.pythonhosted.org/packages/92/48/7e4ccb57cea7c945775147f0f47ffb068f828abafe2d185f435cbd02ea70/envoy_server-1.36.4-py3-none-win_amd64.whl", hash = "sha256:527a5cdcb30760d0f978bff3bb6999e204fcfee439932255d6f84b6d504ab25c", size = 18345267, upload-time = "2025-12-14T14:38:00.486Z" }, +] + [[package]] name = "exceptiongroup" version = "1.3.0" @@ -999,6 +1012,130 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/b1/9ff6578d789a89812ff21e4e0f80ffae20a65d5dd84e7a17873fe3b365be/griffe-1.14.0-py3-none-any.whl", hash = "sha256:0e9d52832cccf0f7188cfe585ba962d2674b241c01916d780925df34873bceb0", size = 144439, upload-time = "2025-09-05T15:02:27.511Z" }, ] +[[package]] +name = "grpcio" +version = "1.76.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/17/ff4795dc9a34b6aee6ec379f1b66438a3789cd1315aac0cbab60d92f74b3/grpcio-1.76.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:65a20de41e85648e00305c1bb09a3598f840422e522277641145a32d42dcefcc", size = 5840037, upload-time = "2025-10-21T16:20:25.069Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ff/35f9b96e3fa2f12e1dcd58a4513a2e2294a001d64dec81677361b7040c9a/grpcio-1.76.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:40ad3afe81676fd9ec6d9d406eda00933f218038433980aa19d401490e46ecde", size = 11836482, upload-time = "2025-10-21T16:20:30.113Z" }, + { url = "https://files.pythonhosted.org/packages/3e/1c/8374990f9545e99462caacea5413ed783014b3b66ace49e35c533f07507b/grpcio-1.76.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:035d90bc79eaa4bed83f524331d55e35820725c9fbb00ffa1904d5550ed7ede3", size = 6407178, upload-time = "2025-10-21T16:20:32.733Z" }, + { url = "https://files.pythonhosted.org/packages/1e/77/36fd7d7c75a6c12542c90a6d647a27935a1ecaad03e0ffdb7c42db6b04d2/grpcio-1.76.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4215d3a102bd95e2e11b5395c78562967959824156af11fa93d18fdd18050990", size = 7075684, upload-time = "2025-10-21T16:20:35.435Z" }, + { url = "https://files.pythonhosted.org/packages/38/f7/e3cdb252492278e004722306c5a8935eae91e64ea11f0af3437a7de2e2b7/grpcio-1.76.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:49ce47231818806067aea3324d4bf13825b658ad662d3b25fada0bdad9b8a6af", size = 6611133, upload-time = "2025-10-21T16:20:37.541Z" }, + { url = "https://files.pythonhosted.org/packages/7e/20/340db7af162ccd20a0893b5f3c4a5d676af7b71105517e62279b5b61d95a/grpcio-1.76.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8cc3309d8e08fd79089e13ed4819d0af72aa935dd8f435a195fd152796752ff2", size = 7195507, upload-time = "2025-10-21T16:20:39.643Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/f0/b2160addc1487bd8fa4810857a27132fb4ce35c1b330c2f3ac45d697b106/grpcio-1.76.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:971fd5a1d6e62e00d945423a567e42eb1fa678ba89072832185ca836a94daaa6", size = 8160651, upload-time = "2025-10-21T16:20:42.492Z" }, + { url = "https://files.pythonhosted.org/packages/2c/2c/ac6f98aa113c6ef111b3f347854e99ebb7fb9d8f7bb3af1491d438f62af4/grpcio-1.76.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d9adda641db7207e800a7f089068f6f645959f2df27e870ee81d44701dd9db3", size = 7620568, upload-time = "2025-10-21T16:20:45.995Z" }, + { url = "https://files.pythonhosted.org/packages/90/84/7852f7e087285e3ac17a2703bc4129fafee52d77c6c82af97d905566857e/grpcio-1.76.0-cp310-cp310-win32.whl", hash = "sha256:063065249d9e7e0782d03d2bca50787f53bd0fb89a67de9a7b521c4a01f1989b", size = 3998879, upload-time = "2025-10-21T16:20:48.592Z" }, + { url = "https://files.pythonhosted.org/packages/10/30/d3d2adcbb6dd3ff59d6ac3df6ef830e02b437fb5c90990429fd180e52f30/grpcio-1.76.0-cp310-cp310-win_amd64.whl", hash = "sha256:a6ae758eb08088d36812dd5d9af7a9859c05b1e0f714470ea243694b49278e7b", size = 4706892, upload-time = "2025-10-21T16:20:50.697Z" }, + { url = "https://files.pythonhosted.org/packages/a0/00/8163a1beeb6971f66b4bbe6ac9457b97948beba8dd2fc8e1281dce7f79ec/grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a", size = 5843567, upload-time = "2025-10-21T16:20:52.829Z" }, + { url = "https://files.pythonhosted.org/packages/10/c1/934202f5cf335e6d852530ce14ddb0fef21be612ba9ecbbcbd4d748ca32d/grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c", size = 11848017, upload-time = "2025-10-21T16:20:56.705Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/8dec16b1863d74af6eb3543928600ec2195af49ca58b16334972f6775663/grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465", size = 6412027, upload-time = "2025-10-21T16:20:59.3Z" }, + { url = "https://files.pythonhosted.org/packages/d7/64/7b9e6e7ab910bea9d46f2c090380bab274a0b91fb0a2fe9b0cd399fffa12/grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48", size = 7075913, upload-time = "2025-10-21T16:21:01.645Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/093c46e9546073cefa789bd76d44c5cb2abc824ca62af0c18be590ff13ba/grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da", size = 6615417, upload-time = "2025-10-21T16:21:03.844Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b6/5709a3a68500a9c03da6fb71740dcdd5ef245e39266461a03f31a57036d8/grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397", size = 7199683, upload-time = "2025-10-21T16:21:06.195Z" }, + { url = "https://files.pythonhosted.org/packages/91/d3/4b1f2bf16ed52ce0b508161df3a2d186e4935379a159a834cb4a7d687429/grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749", size = 8163109, upload-time = "2025-10-21T16:21:08.498Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/61/d9043f95f5f4cf085ac5dd6137b469d41befb04bd80280952ffa2a4c3f12/grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00", size = 7626676, upload-time = "2025-10-21T16:21:10.693Z" }, + { url = "https://files.pythonhosted.org/packages/36/95/fd9a5152ca02d8881e4dd419cdd790e11805979f499a2e5b96488b85cf27/grpcio-1.76.0-cp311-cp311-win32.whl", hash = "sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054", size = 3997688, upload-time = "2025-10-21T16:21:12.746Z" }, + { url = "https://files.pythonhosted.org/packages/60/9c/5c359c8d4c9176cfa3c61ecd4efe5affe1f38d9bae81e81ac7186b4c9cc8/grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d", size = 4709315, upload-time = "2025-10-21T16:21:15.26Z" }, + { url = "https://files.pythonhosted.org/packages/bf/05/8e29121994b8d959ffa0afd28996d452f291b48cfc0875619de0bde2c50c/grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8", size = 5799718, upload-time = "2025-10-21T16:21:17.939Z" }, + { url = "https://files.pythonhosted.org/packages/d9/75/11d0e66b3cdf998c996489581bdad8900db79ebd83513e45c19548f1cba4/grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280", size = 11825627, upload-time = "2025-10-21T16:21:20.466Z" }, + { url = "https://files.pythonhosted.org/packages/28/50/2f0aa0498bc188048f5d9504dcc5c2c24f2eb1a9337cd0fa09a61a2e75f0/grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4", size = 6359167, upload-time = "2025-10-21T16:21:23.122Z" }, + { url = "https://files.pythonhosted.org/packages/66/e5/bbf0bb97d29ede1d59d6588af40018cfc345b17ce979b7b45424628dc8bb/grpcio-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11", size = 7044267, upload-time = "2025-10-21T16:21:25.995Z" }, + { url = "https://files.pythonhosted.org/packages/f5/86/f6ec2164f743d9609691115ae8ece098c76b894ebe4f7c94a655c6b03e98/grpcio-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6", size = 6573963, upload-time = "2025-10-21T16:21:28.631Z" }, + { url = "https://files.pythonhosted.org/packages/60/bc/8d9d0d8505feccfdf38a766d262c71e73639c165b311c9457208b56d92ae/grpcio-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8", size = 7164484, upload-time = "2025-10-21T16:21:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/67/e6/5d6c2fc10b95edf6df9b8f19cf10a34263b7fd48493936fffd5085521292/grpcio-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980", size = 8127777, upload-time = "2025-10-21T16:21:33.577Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c8/dce8ff21c86abe025efe304d9e31fdb0deaaa3b502b6a78141080f206da0/grpcio-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882", size = 7594014, upload-time = "2025-10-21T16:21:41.882Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/42/ad28191ebf983a5d0ecef90bab66baa5a6b18f2bfdef9d0a63b1973d9f75/grpcio-1.76.0-cp312-cp312-win32.whl", hash = "sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958", size = 3984750, upload-time = "2025-10-21T16:21:44.006Z" }, + { url = "https://files.pythonhosted.org/packages/9e/00/7bd478cbb851c04a48baccaa49b75abaa8e4122f7d86da797500cccdd771/grpcio-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347", size = 4704003, upload-time = "2025-10-21T16:21:46.244Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ed/71467ab770effc9e8cef5f2e7388beb2be26ed642d567697bb103a790c72/grpcio-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:26ef06c73eb53267c2b319f43e6634c7556ea37672029241a056629af27c10e2", size = 5807716, upload-time = "2025-10-21T16:21:48.475Z" }, + { url = "https://files.pythonhosted.org/packages/2c/85/c6ed56f9817fab03fa8a111ca91469941fb514e3e3ce6d793cb8f1e1347b/grpcio-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:45e0111e73f43f735d70786557dc38141185072d7ff8dc1829d6a77ac1471468", size = 11821522, upload-time = "2025-10-21T16:21:51.142Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/2b8a235ab40c39cbc141ef647f8a6eb7b0028f023015a4842933bc0d6831/grpcio-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83d57312a58dcfe2a3a0f9d1389b299438909a02db60e2f2ea2ae2d8034909d3", size = 6362558, upload-time = "2025-10-21T16:21:54.213Z" }, + { url = "https://files.pythonhosted.org/packages/bd/64/9784eab483358e08847498ee56faf8ff6ea8e0a4592568d9f68edc97e9e9/grpcio-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3e2a27c89eb9ac3d81ec8835e12414d73536c6e620355d65102503064a4ed6eb", size = 7049990, upload-time = "2025-10-21T16:21:56.476Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/8c12319a6369434e7a184b987e8e9f3b49a114c489b8315f029e24de4837/grpcio-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61f69297cba3950a524f61c7c8ee12e55c486cb5f7db47ff9dcee33da6f0d3ae", size = 6575387, upload-time = "2025-10-21T16:21:59.051Z" }, + { url = "https://files.pythonhosted.org/packages/15/0f/f12c32b03f731f4a6242f771f63039df182c8b8e2cf8075b245b409259d4/grpcio-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6a15c17af8839b6801d554263c546c69c4d7718ad4321e3166175b37eaacca77", size = 7166668, upload-time = "2025-10-21T16:22:02.049Z" }, + { url = "https://files.pythonhosted.org/packages/ff/2d/3ec9ce0c2b1d92dd59d1c3264aaec9f0f7c817d6e8ac683b97198a36ed5a/grpcio-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:25a18e9810fbc7e7f03ec2516addc116a957f8cbb8cbc95ccc80faa072743d03", size = 8124928, upload-time = "2025-10-21T16:22:04.984Z" }, + { url = "https://files.pythonhosted.org/packages/1a/74/fd3317be5672f4856bcdd1a9e7b5e17554692d3db9a3b273879dc02d657d/grpcio-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:931091142fd8cc14edccc0845a79248bc155425eee9a98b2db2ea4f00a235a42", size = 7589983, upload-time = "2025-10-21T16:22:07.881Z" }, + { url = "https://files.pythonhosted.org/packages/45/bb/ca038cf420f405971f19821c8c15bcbc875505f6ffadafe9ffd77871dc4c/grpcio-1.76.0-cp313-cp313-win32.whl", hash = "sha256:5e8571632780e08526f118f74170ad8d50fb0a48c23a746bef2a6ebade3abd6f", size = 3984727, upload-time = "2025-10-21T16:22:10.032Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/80/84087dc56437ced7cdd4b13d7875e7439a52a261e3ab4e06488ba6173b0a/grpcio-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:f9f7bd5faab55f47231ad8dba7787866b69f5e93bc306e3915606779bbfb4ba8", size = 4702799, upload-time = "2025-10-21T16:22:12.709Z" }, + { url = "https://files.pythonhosted.org/packages/b4/46/39adac80de49d678e6e073b70204091e76631e03e94928b9ea4ecf0f6e0e/grpcio-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:ff8a59ea85a1f2191a0ffcc61298c571bc566332f82e5f5be1b83c9d8e668a62", size = 5808417, upload-time = "2025-10-21T16:22:15.02Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f5/a4531f7fb8b4e2a60b94e39d5d924469b7a6988176b3422487be61fe2998/grpcio-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06c3d6b076e7b593905d04fdba6a0525711b3466f43b3400266f04ff735de0cd", size = 11828219, upload-time = "2025-10-21T16:22:17.954Z" }, + { url = "https://files.pythonhosted.org/packages/4b/1c/de55d868ed7a8bd6acc6b1d6ddc4aa36d07a9f31d33c912c804adb1b971b/grpcio-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fd5ef5932f6475c436c4a55e4336ebbe47bd3272be04964a03d316bbf4afbcbc", size = 6367826, upload-time = "2025-10-21T16:22:20.721Z" }, + { url = "https://files.pythonhosted.org/packages/59/64/99e44c02b5adb0ad13ab3adc89cb33cb54bfa90c74770f2607eea629b86f/grpcio-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b331680e46239e090f5b3cead313cc772f6caa7d0fc8de349337563125361a4a", size = 7049550, upload-time = "2025-10-21T16:22:23.637Z" }, + { url = "https://files.pythonhosted.org/packages/43/28/40a5be3f9a86949b83e7d6a2ad6011d993cbe9b6bd27bea881f61c7788b6/grpcio-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2229ae655ec4e8999599469559e97630185fdd53ae1e8997d147b7c9b2b72cba", size = 6575564, upload-time = "2025-10-21T16:22:26.016Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a9/1be18e6055b64467440208a8559afac243c66a8b904213af6f392dc2212f/grpcio-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:490fa6d203992c47c7b9e4a9d39003a0c2bcc1c9aa3c058730884bbbb0ee9f09", size = 7176236, upload-time = "2025-10-21T16:22:28.362Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/dba05d3fcc151ce6e81327541d2cc8394f442f6b350fead67401661bf041/grpcio-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:479496325ce554792dba6548fae3df31a72cef7bad71ca2e12b0e58f9b336bfc", size = 8125795, upload-time = "2025-10-21T16:22:31.075Z" }, + { url = "https://files.pythonhosted.org/packages/4a/45/122df922d05655f63930cf42c9e3f72ba20aadb26c100ee105cad4ce4257/grpcio-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1c9b93f79f48b03ada57ea24725d83a30284a012ec27eab2cf7e50a550cbbbcc", size = 7592214, upload-time = "2025-10-21T16:22:33.831Z" }, + { url = "https://files.pythonhosted.org/packages/4a/6e/0b899b7f6b66e5af39e377055fb4a6675c9ee28431df5708139df2e93233/grpcio-1.76.0-cp314-cp314-win32.whl", hash = "sha256:747fa73efa9b8b1488a95d0ba1039c8e2dca0f741612d80415b1e1c560febf4e", size = 4062961, upload-time = "2025-10-21T16:22:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/19/41/0b430b01a2eb38ee887f88c1f07644a1df8e289353b78e82b37ef988fb64/grpcio-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:922fa70ba549fce362d2e2871ab542082d66e2aaf0c19480ea453905b01f384e", size = 4834462, upload-time = "2025-10-21T16:22:39.772Z" }, +] + +[[package]] +name = "grpcio-tools" +version = "1.76.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/77/17d60d636ccd86a0db0eccc24d02967bbc3eea86b9db7324b04507ebaa40/grpcio_tools-1.76.0.tar.gz", hash = "sha256:ce80169b5e6adf3e8302f3ebb6cb0c3a9f08089133abca4b76ad67f751f5ad88", size = 5390807, upload-time = "2025-10-21T16:26:55.416Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/57/4b/6fceb806f6d5055793f5db0d7a1e3449ea16482c2aec3ad93b05678c325a/grpcio_tools-1.76.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:9b99086080ca394f1da9894ee20dedf7292dd614e985dcba58209a86a42de602", size = 2545596, upload-time = "2025-10-21T16:24:25.134Z" }, + { url = "https://files.pythonhosted.org/packages/3b/11/57af2f3f32016e6e2aae063a533aae2c0e6c577bc834bef97277a7fa9733/grpcio_tools-1.76.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8d95b5c2394bbbe911cbfc88d15e24c9e174958cb44dad6aa8c46fe367f6cc2a", size = 5843462, upload-time = "2025-10-21T16:24:31.046Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8b/470bedaf7fb75fb19500b4c160856659746dcf53e3d9241fcc17e3af7155/grpcio_tools-1.76.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d54e9ce2ffc5d01341f0c8898c1471d887ae93d77451884797776e0a505bd503", size = 2591938, upload-time = "2025-10-21T16:24:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/77/3e/530e848e00d6fe2db152984b2c9432bb8497a3699719fd7898d05cb7d95e/grpcio_tools-1.76.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:c83f39f64c2531336bd8d5c846a2159c9ea6635508b0f8ed3ad0d433e25b53c9", size = 2905296, upload-time = "2025-10-21T16:24:34.938Z" }, + { url = "https://files.pythonhosted.org/packages/75/b5/632229d17364eb7db5d3d793131172b2380323c4e6500f528743e477267c/grpcio_tools-1.76.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be480142fae0d986d127d6cb5cbc0357e4124ba22e96bb8b9ece32c48bc2c8ea", size = 2656266, upload-time = "2025-10-21T16:24:37.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/71/5756aa9a14d16738b04677b89af8612112d69fb098ffdbc5666020933f23/grpcio_tools-1.76.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7fefd41fc4ca11fab36f42bdf0f3812252988f8798fca8bec8eae049418deacd", size = 3105798, upload-time = "2025-10-21T16:24:40.408Z" }, + { url = "https://files.pythonhosted.org/packages/ab/de/9058021da11be399abe6c5d2a9a2abad1b00d367111018637195d107539b/grpcio_tools-1.76.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:63551f371082173e259e7f6ec24b5f1fe7d66040fadd975c966647bca605a2d3", size = 3654923, upload-time = "2025-10-21T16:24:42.52Z" }, + { url = "https://files.pythonhosted.org/packages/8e/93/29f04cc18f1023b2a4342374a45b1cd87a0e1458fc44aea74baad5431dcd/grpcio_tools-1.76.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:75a2c34584c99ff47e5bb267866e7dec68d30cd3b2158e1ee495bfd6db5ad4f0", size = 3322558, upload-time = "2025-10-21T16:24:44.356Z" }, + { url = "https://files.pythonhosted.org/packages/d9/ab/8936708d30b9a2484f6b093dfc57843c1d0380de0eba78a8ad8693535f26/grpcio_tools-1.76.0-cp310-cp310-win32.whl", hash = "sha256:908758789b0a612102c88e8055b7191eb2c4290d5d6fc50fb9cac737f8011ef1", size = 993621, upload-time = "2025-10-21T16:24:46.7Z" }, + { url = "https://files.pythonhosted.org/packages/3d/d2/c5211feb81a532eca2c4dddd00d4971b91c10837cd083781f6ab3a6fdb5b/grpcio_tools-1.76.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:ec6e49e7c4b2a222eb26d1e1726a07a572b6e629b2cf37e6bb784c9687904a52", size = 1158401, upload-time = "2025-10-21T16:24:48.416Z" }, + { url = "https://files.pythonhosted.org/packages/73/d1/efbeed1a864c846228c0a3b322e7a2d6545f025e35246aebf96496a36004/grpcio_tools-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c6480f6af6833850a85cca1c6b435ef4ffd2ac8e88ef683b4065233827950243", size = 2545931, upload-time = "2025-10-21T16:24:50.201Z" }, + { url = "https://files.pythonhosted.org/packages/af/8e/f257c0f565d9d44658301238b01a9353bc6f3b272bb4191faacae042579d/grpcio_tools-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c7c23fe1dc09818e16a48853477806ad77dd628b33996f78c05a293065f8210c", size = 5844794, upload-time = "2025-10-21T16:24:53.312Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c0/6c1e89c67356cb20e19ed670c5099b13e40fd678cac584c778f931666a86/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fcdce7f7770ff052cd4e60161764b0b3498c909bde69138f8bd2e7b24a3ecd8f", size = 2591772, upload-time = "2025-10-21T16:24:55.729Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/5f33aa7bc3ddaad0cfd2f4e950ac4f1a310e8d0c7b1358622a581e8b7a2f/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b598fdcebffa931c7da5c9e90b5805fff7e9bc6cf238319358a1b85704c57d33", size = 2905140, upload-time = "2025-10-21T16:24:57.952Z" }, + { url = "https://files.pythonhosted.org/packages/f4/3e/23e3a52a77368f47188ed83c34eb53866d3ce0f73835b2f6764844ae89eb/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6a9818ff884796b12dcf8db32126e40ec1098cacf5697f27af9cfccfca1c1fae", size = 2656475, upload-time = "2025-10-21T16:25:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/51/85/a74ae87ec7dbd3d2243881f5c548215aed1148660df7945be3a125ba9a21/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:105e53435b2eed3961da543db44a2a34479d98d18ea248219856f30a0ca4646b", size = 3106158, upload-time = "2025-10-21T16:25:03.642Z" }, + { url = "https://files.pythonhosted.org/packages/54/d5/a6ed1e5823bc5d55a1eb93e0c14ccee0b75951f914832ab51fb64d522a0f/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:454a1232c7f99410d92fa9923c7851fd4cdaf657ee194eac73ea1fe21b406d6e", size = 3654980, upload-time = "2025-10-21T16:25:05.717Z" }, + { url = "https://files.pythonhosted.org/packages/f9/29/c05d5501ba156a242079ef71d073116d2509c195b5e5e74c545f0a3a3a69/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ca9ccf667afc0268d45ab202af4556c72e57ea36ebddc93535e1a25cbd4f8aba", size = 3322658, upload-time = "2025-10-21T16:25:07.885Z" }, + { url = "https://files.pythonhosted.org/packages/02/b6/ee0317b91da19a7537d93c4161cbc2a45a165c8893209b0bbd470d830ffa/grpcio_tools-1.76.0-cp311-cp311-win32.whl", hash = "sha256:a83c87513b708228b4cad7619311daba65b40937745103cadca3db94a6472d9c", size = 993837, upload-time = "2025-10-21T16:25:10.133Z" }, + { url = "https://files.pythonhosted.org/packages/81/63/9623cadf0406b264737f16d4ed273bb2d65001d87fbd803b565c45d665d1/grpcio_tools-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:2ce5e87ec71f2e4041dce4351f2a8e3b713e3bca6b54c69c3fbc6c7ad1f4c386", size = 1158634, upload-time = "2025-10-21T16:25:12.705Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ca/a931c1439cabfe305c9afd07e233150cd0565aa062c20d1ee412ed188852/grpcio_tools-1.76.0-cp312-cp312-linux_armv7l.whl", hash = 
"sha256:4ad555b8647de1ebaffb25170249f89057721ffb74f7da96834a07b4855bb46a", size = 2546852, upload-time = "2025-10-21T16:25:15.024Z" }, + { url = "https://files.pythonhosted.org/packages/4c/07/935cfbb7dccd602723482a86d43fbd992f91e9867bca0056a1e9f348473e/grpcio_tools-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:243af7c8fc7ff22a40a42eb8e0f6f66963c1920b75aae2a2ec503a9c3c8b31c1", size = 5841777, upload-time = "2025-10-21T16:25:17.425Z" }, + { url = "https://files.pythonhosted.org/packages/e4/92/8fcb5acebdccb647e0fa3f002576480459f6cf81e79692d7b3c4d6e29605/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8207b890f423142cc0025d041fb058f7286318df6a049565c27869d73534228b", size = 2594004, upload-time = "2025-10-21T16:25:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ea/64838e8113b7bfd4842b15c815a7354cb63242fdce9d6648d894b5d50897/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3dafa34c2626a6691d103877e8a145f54c34cf6530975f695b396ed2fc5c98f8", size = 2905563, upload-time = "2025-10-21T16:25:21.889Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d6/53798827d821098219e58518b6db52161ce4985620850aa74ce3795da8a7/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:30f1d2dda6ece285b3d9084e94f66fa721ebdba14ae76b2bc4c581c8a166535c", size = 2656936, upload-time = "2025-10-21T16:25:24.369Z" }, + { url = "https://files.pythonhosted.org/packages/89/a3/d9c1cefc46a790eec520fe4e70e87279abb01a58b1a3b74cf93f62b824a2/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a889af059dc6dbb82d7b417aa581601316e364fe12eb54c1b8d95311ea50916d", size = 3109811, upload-time = "2025-10-21T16:25:26.711Z" }, + { url = "https://files.pythonhosted.org/packages/50/75/5997752644b73b5d59377d333a51c8a916606df077f5a487853e37dca289/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c3f2c3c44c56eb5d479ab178f0174595d0a974c37dade442f05bb73dfec02f31", size = 3658786, upload-time = "2025-10-21T16:25:28.819Z" }, + { url = "https://files.pythonhosted.org/packages/84/47/dcf8380df4bd7931ffba32fc6adc2de635b6569ca27fdec7121733797062/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:479ce02dff684046f909a487d452a83a96b4231f7c70a3b218a075d54e951f56", size = 3325144, upload-time = "2025-10-21T16:25:30.863Z" }, + { url = "https://files.pythonhosted.org/packages/04/88/ea3e5fdb874d8c2d04488e4b9d05056537fba70915593f0c283ac77df188/grpcio_tools-1.76.0-cp312-cp312-win32.whl", hash = "sha256:9ba4bb539936642a44418b38ee6c3e8823c037699e2cb282bd8a44d76a4be833", size = 993523, upload-time = "2025-10-21T16:25:32.594Z" }, + { url = "https://files.pythonhosted.org/packages/de/b1/ce7d59d147675ec191a55816be46bc47a343b5ff07279eef5817c09cc53e/grpcio_tools-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd489016766b05f9ed8a6b6596004b62c57d323f49593eac84add032a6d43f7", size = 1158493, upload-time = "2025-10-21T16:25:34.5Z" }, + { url = "https://files.pythonhosted.org/packages/13/01/b16fe73f129df49811d886dc99d3813a33cf4d1c6e101252b81c895e929f/grpcio_tools-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ff48969f81858397ef33a36b326f2dbe2053a48b254593785707845db73c8f44", size = 2546312, upload-time = "2025-10-21T16:25:37.138Z" }, + { url = "https://files.pythonhosted.org/packages/25/17/2594c5feb76bb0b25bfbf91ec1075b276e1b2325e4bc7ea649a7b5dbf353/grpcio_tools-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = 
"sha256:aa2f030fd0ef17926026ee8e2b700e388d3439155d145c568fa6b32693277613", size = 5839627, upload-time = "2025-10-21T16:25:40.082Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c6/097b1aa26fbf72fb3cdb30138a2788529e4f10d8759de730a83f5c06726e/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bacbf3c54f88c38de8e28f8d9b97c90b76b105fb9ddef05d2c50df01b32b92af", size = 2592817, upload-time = "2025-10-21T16:25:42.301Z" }, + { url = "https://files.pythonhosted.org/packages/03/78/d1d985b48592a674509a85438c1a3d4c36304ddfc99d1b05d27233b51062/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0d4e4afe9a0e3c24fad2f1af45f98cf8700b2bfc4d790795756ba035d2ea7bdc", size = 2905186, upload-time = "2025-10-21T16:25:44.395Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0e/770afbb47f0b5f594b93a7b46a95b892abda5eebe60efb511e96cee52170/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fbbd4e1fc5af98001ceef5e780e8c10921d94941c3809238081e73818ef707f1", size = 2656188, upload-time = "2025-10-21T16:25:46.942Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2b/017c2fcf4c5d3cf00cf7d5ce21eb88521de0d89bdcf26538ad2862ec6d07/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b05efe5a59883ab8292d596657273a60e0c3e4f5a9723c32feb9fc3a06f2f3ef", size = 3109141, upload-time = "2025-10-21T16:25:49.137Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5f/2495f88e3d50c6f2c2da2752bad4fa3a30c52ece6c9d8b0c636cd8b1430b/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:be483b90e62b7892eb71fa1fc49750bee5b2ee35b5ec99dd2b32bed4bedb5d71", size = 3657892, upload-time = "2025-10-21T16:25:52.362Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1d/c4f39d31b19d9baf35d900bf3f969ce1c842f63a8560c8003ed2e5474760/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:630cd7fd3e8a63e20703a7ad816979073c2253e591b5422583c27cae2570de73", size = 3324778, upload-time = "2025-10-21T16:25:54.629Z" }, + { url = "https://files.pythonhosted.org/packages/b4/b6/35ee3a6e4af85a93da28428f81f4b29bcb36f6986b486ad71910fcc02e25/grpcio_tools-1.76.0-cp313-cp313-win32.whl", hash = "sha256:eb2567280f9f6da5444043f0e84d8408c7a10df9ba3201026b30e40ef3814736", size = 993084, upload-time = "2025-10-21T16:25:56.52Z" }, + { url = "https://files.pythonhosted.org/packages/f3/7a/5bd72344d86ee860e5920c9a7553cfe3bc7b1fce79f18c00ac2497f5799f/grpcio_tools-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:0071b1c0bd0f5f9d292dca4efab32c92725d418e57f9c60acdc33c0172af8b53", size = 1158151, upload-time = "2025-10-21T16:25:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c0/aa20eebe8f3553b7851643e9c88d237c3a6ca30ade646897e25dbb27be99/grpcio_tools-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:c53c5719ef2a435997755abde3826ba4087174bd432aa721d8fac781fcea79e4", size = 2546297, upload-time = "2025-10-21T16:26:01.258Z" }, + { url = "https://files.pythonhosted.org/packages/d9/98/6af702804934443c1d0d4d27d21b990d92d22ddd1b6bec6b056558cbbffa/grpcio_tools-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:e3db1300d7282264639eeee7243f5de7e6a7c0283f8bf05d66c0315b7b0f0b36", size = 5839804, upload-time = "2025-10-21T16:26:05.495Z" }, + { url = "https://files.pythonhosted.org/packages/ea/8d/7725fa7b134ef8405ffe0a37c96eeb626e5af15d70e1bdac4f8f1abf842e/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", 
hash = "sha256:0b018a4b7455a7e8c16d0fdb3655a6ba6c9536da6de6c5d4f11b6bb73378165b", size = 2593922, upload-time = "2025-10-21T16:26:07.563Z" }, + { url = "https://files.pythonhosted.org/packages/de/ff/5b6b5012c79fa72f9107dc13f7226d9ce7e059ea639fd8c779e0dd284386/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ec6e4de3866e47cfde56607b1fae83ecc5aa546e06dec53de11f88063f4b5275", size = 2905327, upload-time = "2025-10-21T16:26:09.668Z" }, + { url = "https://files.pythonhosted.org/packages/24/01/2691d369ea462cd6b6c92544122885ca01f7fa5ac75dee023e975e675858/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b8da4d828883913f1852bdd67383713ae5c11842f6c70f93f31893eab530aead", size = 2656214, upload-time = "2025-10-21T16:26:11.773Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e7/3f8856e6ec3dd492336a91572993344966f237b0e3819fbe96437b19d313/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5c120c2cf4443121800e7f9bcfe2e94519fa25f3bb0b9882359dd3b252c78a7b", size = 3109889, upload-time = "2025-10-21T16:26:15.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e4/ce5248072e47db276dc7e069e93978dcde490c959788ce7cce8081d0bfdc/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8b7df5591d699cd9076065f1f15049e9c3597e0771bea51c8c97790caf5e4197", size = 3657939, upload-time = "2025-10-21T16:26:17.34Z" }, + { url = "https://files.pythonhosted.org/packages/f6/df/81ff88af93c52135e425cd5ec9fe8b186169c7d5f9e0409bdf2bbedc3919/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a25048c5f984d33e3f5b6ad7618e98736542461213ade1bd6f2fcfe8ce804e3d", size = 3324752, upload-time = "2025-10-21T16:26:20.092Z" }, + { url = "https://files.pythonhosted.org/packages/35/3d/f6b83044afbf6522254a3b509515a00fed16a819c87731a478dbdd1d35c1/grpcio_tools-1.76.0-cp314-cp314-win32.whl", hash = "sha256:4b77ce6b6c17869858cfe14681ad09ed3a8a80e960e96035de1fd87f78158740", size = 1015578, upload-time = "2025-10-21T16:26:22.517Z" }, + { url = "https://files.pythonhosted.org/packages/95/4d/31236cddb7ffb09ba4a49f4f56d2608fec3bbb21c7a0a975d93bca7cd22e/grpcio_tools-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:2ccd2c8d041351cc29d0fc4a84529b11ee35494a700b535c1f820b642f2a72fc", size = 1190242, upload-time = "2025-10-21T16:26:25.296Z" }, +] + [[package]] name = "gunicorn" version = "23.0.0" @@ -1494,16 +1631,16 @@ wheels = [ [[package]] name = "protobuf" -version = "6.30.0" +version = "6.31.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/53/6a/2629bb3529e5bdfbd6c4608ff5c7d942cd4beae85793f84ba543aab2548a/protobuf-6.30.0.tar.gz", hash = "sha256:852b675d276a7d028f660da075af1841c768618f76b90af771a8e2c29e6f5965", size = 429239, upload-time = "2025-03-05T00:48:55.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/f3/b9655a711b32c19720253f6f06326faf90580834e2e83f840472d752bc8b/protobuf-6.31.1.tar.gz", hash = "sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a", size = 441797, upload-time = "2025-05-28T19:25:54.947Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/76/8b1cbf762d98b09fcb29bbc6eca97dc6e1cd865b97a49c443aa23f1a9f82/protobuf-6.30.0-cp310-abi3-win32.whl", hash = "sha256:7337d76d8efe65ee09ee566b47b5914c517190196f414e5418fa236dfd1aed3e", size = 419141, upload-time = "2025-03-05T00:48:41.378Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/50/2ea2fb4533321438f5106723c70c303529ba184540e619ebe75e790d402e/protobuf-6.30.0-cp310-abi3-win_amd64.whl", hash = "sha256:9b33d51cc95a7ec4f407004c8b744330b6911a37a782e2629c67e1e8ac41318f", size = 430995, upload-time = "2025-03-05T00:48:44.661Z" }, - { url = "https://files.pythonhosted.org/packages/a1/7d/a7dfa7aa3deda114920b1ed57c0026e85a976e74658db2784a0443510252/protobuf-6.30.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:52d4bb6fe76005860e1d0b8bfa126f5c97c19cc82704961f60718f50be16942d", size = 417570, upload-time = "2025-03-05T00:48:46.65Z" }, - { url = "https://files.pythonhosted.org/packages/11/87/a9c7b020c4072dc34e3a2a3cde69366ffc623afff0e7f10f4e5275aaec01/protobuf-6.30.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:7940ab4dfd60d514b2e1d3161549ea7aed5be37d53bafde16001ac470a3e202b", size = 317310, upload-time = "2025-03-05T00:48:47.784Z" }, - { url = "https://files.pythonhosted.org/packages/95/66/424db2262723781dc94208ff9ce201df2f44f18a46fbff3c067812c6b5b9/protobuf-6.30.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:d79bf6a202a536b192b7e8d295d7eece0c86fbd9b583d147faf8cfeff46bf598", size = 316203, upload-time = "2025-03-05T00:48:49.736Z" }, - { url = "https://files.pythonhosted.org/packages/51/6f/21c2b7de96c3051f847a4a88a12fdf015ed6b7d50fc131fb101a739bd7a5/protobuf-6.30.0-py3-none-any.whl", hash = "sha256:e5ef216ea061b262b8994cb6b7d6637a4fb27b3fb4d8e216a6040c0b93bd10d7", size = 167054, upload-time = "2025-03-05T00:48:54.286Z" }, + { url = "https://files.pythonhosted.org/packages/f3/6f/6ab8e4bf962fd5570d3deaa2d5c38f0a363f57b4501047b5ebeb83ab1125/protobuf-6.31.1-cp310-abi3-win32.whl", hash = "sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9", size = 423603, upload-time = "2025-05-28T19:25:41.198Z" }, + { url = "https://files.pythonhosted.org/packages/44/3a/b15c4347dd4bf3a1b0ee882f384623e2063bb5cf9fa9d57990a4f7df2fb6/protobuf-6.31.1-cp310-abi3-win_amd64.whl", hash = "sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447", size = 435283, upload-time = "2025-05-28T19:25:44.275Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c9/b9689a2a250264a84e66c46d8862ba788ee7a641cdca39bccf64f59284b7/protobuf-6.31.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402", size = 425604, upload-time = "2025-05-28T19:25:45.702Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/7a5a94032c83375e4fe7e7f56e3976ea6ac90c5e85fac8576409e25c39c3/protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39", size = 322115, upload-time = "2025-05-28T19:25:47.128Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/b59d405d64d31999244643d88c45c8241c58f17cc887e73bcb90602327f8/protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6", size = 321070, upload-time = "2025-05-28T19:25:50.036Z" }, + { url = "https://files.pythonhosted.org/packages/f7/af/ab3c51ab7507a7325e98ffe691d9495ee3d3aa5f589afad65ec920d39821/protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e", size = 168724, upload-time = "2025-05-28T19:25:53.926Z" }, ] [[package]] @@ -1817,29 +1954,40 @@ wheels = [ [[package]] name = "pyvoy" -version = "0.1.2" +version = "0.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = 
"envoy-server" }, { name = "find-libpython" }, { name = "pyyaml" }, - { name = "uvloop" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/d3/af/bfb318526b6d86c1cb1c8437b5ec5d751467397a0c233ec8d03ccaee43a4/pyvoy-0.1.2-cp310-cp310-macosx_15_0_arm64.whl", hash = "sha256:8d449ee02cf1fe1c0f5858ac3c81e39d9ffb5033a03f07ecec4948295f6fae89", size = 32565596, upload-time = "2025-11-18T06:48:22.18Z" }, - { url = "https://files.pythonhosted.org/packages/19/ea/060de57934ee2ce0ea23db830cf400c4786056505103def9d9d1255ce59c/pyvoy-0.1.2-cp310-cp310-manylinux_2_31_aarch64.whl", hash = "sha256:d12939bdd01f45f64c9757eafcc6dbbffee0b5cd5d69a41982237763ff4c8908", size = 28528062, upload-time = "2025-11-18T06:48:25.739Z" }, - { url = "https://files.pythonhosted.org/packages/c8/f1/d962f4d77ab40e75ad069d8c861f94a32c988732de19db6737e7155d6811/pyvoy-0.1.2-cp310-cp310-manylinux_2_31_x86_64.whl", hash = "sha256:cc7eb5dea99f267bb1796bd63d3439ebcebf9df4acdf133dfaf4b66257907003", size = 30174575, upload-time = "2025-11-18T06:48:28.593Z" }, - { url = "https://files.pythonhosted.org/packages/df/89/03aa01547af96dd7aa9be359a572b924e1f688c074ef45e553dbc0f2db3d/pyvoy-0.1.2-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:8b0fef26ca9d380c3ea52c5236df29e8f98600a10fe56ae7c6377b92da68456d", size = 32565143, upload-time = "2025-11-18T06:48:31.15Z" }, - { url = "https://files.pythonhosted.org/packages/7b/e4/be897bfe763bbff972e77780d1f203edc2c9a3410a38aeb811eb9ba064cc/pyvoy-0.1.2-cp311-cp311-manylinux_2_31_aarch64.whl", hash = "sha256:d099e4124aa90c1ef4b24c53e8024f948c29d5a99d924740e0181ab14e8022c5", size = 28528042, upload-time = "2025-11-18T06:48:33.665Z" }, - { url = "https://files.pythonhosted.org/packages/22/50/b99b6b4e6f5cf6b69449b7f305fcb83dc92da63143bbbff6bcdda81f0633/pyvoy-0.1.2-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:8489b03ae6b0dc833d09997ec960dfff6afbfee891c60a42d21342dbe8016dde", size = 30174054, upload-time = "2025-11-18T06:48:36.578Z" }, - { url = "https://files.pythonhosted.org/packages/ce/b3/48e54808f68efd4641aad5dec361b2119754f7ad8c95ae18a7f1027c4005/pyvoy-0.1.2-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:82f6db5874a104b4dc058f5998ab4b7b8dd5d2158b3fe1812447141df3bcf0af", size = 32562089, upload-time = "2025-11-18T06:48:39.23Z" }, - { url = "https://files.pythonhosted.org/packages/72/f9/ddb769f361a7644aec083cadc180d6d963fdf3e0838a993c4c5a67573a4e/pyvoy-0.1.2-cp312-cp312-manylinux_2_31_aarch64.whl", hash = "sha256:773c9c13a3b0d13b7bddc3b3f6c93fd5298e792269bec6a21a3801b940c418af", size = 28526360, upload-time = "2025-11-18T06:48:42.018Z" }, - { url = "https://files.pythonhosted.org/packages/a8/cf/f52e33f141da7c026c0dfc922178b0a902f5e5486cffd5da1ba23796feaf/pyvoy-0.1.2-cp312-cp312-manylinux_2_31_x86_64.whl", hash = "sha256:3437e4bcbc3f9c230ed974f0506e1d507c61385ba49def114eff52f12ce088d3", size = 30172731, upload-time = "2025-11-18T06:48:44.878Z" }, - { url = "https://files.pythonhosted.org/packages/6f/d5/d16ff5de8f1372f2ba0170ffcace1a79482dcfdfdd64f625d3e959cb3699/pyvoy-0.1.2-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:04c2654124c8097dc0a904c8cd1896ef072c23e1e40997501c39a29074b9f16f", size = 32562083, upload-time = "2025-11-18T06:48:47.786Z" }, - { url = "https://files.pythonhosted.org/packages/f5/d3/cc2b423945ccf79ea9a6550aada535d655f32939bcbb69a594ad43e6f791/pyvoy-0.1.2-cp313-cp313-manylinux_2_31_aarch64.whl", hash = "sha256:3cf86b31e007e8a2483c92ede7bc9c959600eebbfd1ac1254ddabce119333360", size = 28523951, upload-time = "2025-11-18T06:48:50.761Z" }, - { url = 
"https://files.pythonhosted.org/packages/97/e3/7a24e69471f3a400f39f82e5195af765ef124339b9d7a98620c27658312c/pyvoy-0.1.2-cp313-cp313-manylinux_2_31_x86_64.whl", hash = "sha256:1caa920cee4b8a79ca3bb4e4788b0cfc7005dab396b2d892819c7e50f918d670", size = 30172138, upload-time = "2025-11-18T06:48:53.525Z" }, - { url = "https://files.pythonhosted.org/packages/7d/c0/731370402d774b41b786638d15a3378a5c1e2fe482032b7b40df6c90dadf/pyvoy-0.1.2-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:57bff2093d4d2958c9f2b50f67cc2e6ae9d53adc6de3d0a7a2c9d3d36f6f5b53", size = 32561464, upload-time = "2025-11-18T06:48:56.297Z" }, - { url = "https://files.pythonhosted.org/packages/58/67/1777827471efa575ae9586dc3b7ab4b331a15dbccf3d40f2c05311700574/pyvoy-0.1.2-cp314-cp314-manylinux_2_31_aarch64.whl", hash = "sha256:aef91cc81a7a584d937e12b2fa2baae451913a642c44eb8d771008da233b8e40", size = 28523669, upload-time = "2025-11-18T06:48:59.363Z" }, - { url = "https://files.pythonhosted.org/packages/2d/3a/2b8a0300e6bba250f724186f5eacd11e34afe69a358a91333ed4b274b4e6/pyvoy-0.1.2-cp314-cp314-manylinux_2_31_x86_64.whl", hash = "sha256:17606c3d7c5b9adffb7b4833772c1656924414685ea26747751c3a666b2ef15f", size = 30171296, upload-time = "2025-11-18T06:49:02.275Z" }, + { name = "uvloop", marker = "sys_platform != 'win32'" }, + { name = "winloop", marker = "sys_platform == 'win32'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/63/8cd2e57ab32344d070d52526ac7bc2c9529bd7994ff71adf3e6d17292874/pyvoy-0.2.0-cp310-cp310-macosx_15_0_arm64.whl", hash = "sha256:dae7d8f7d2d1986dc97076bda4d4eb3c1a083de0339accacf002c9569c346231", size = 605398, upload-time = "2025-12-17T04:13:58.556Z" }, + { url = "https://files.pythonhosted.org/packages/a4/bd/3d9a98d8ba3c34b02a764ddfa271d5c0f2d3c4e3c705ebec5ba7376e8e7e/pyvoy-0.2.0-cp310-cp310-manylinux_2_31_aarch64.whl", hash = "sha256:f66afa5db0a6df613ffb2864a0945e3670bbddc0f70f523c9cb0996f50e69cf1", size = 659070, upload-time = "2025-12-17T04:14:00.447Z" }, + { url = "https://files.pythonhosted.org/packages/62/c2/77d5a7751b4efba5e5008681e49ea4f93732615269c11f072238c31ef6a2/pyvoy-0.2.0-cp310-cp310-manylinux_2_31_x86_64.whl", hash = "sha256:02cf1b1904469b50edac788bafb45143d07848c1b53e42445d7847c2bdf230f5", size = 677985, upload-time = "2025-12-17T04:14:01.995Z" }, + { url = "https://files.pythonhosted.org/packages/bc/4b/71cb751c0ecb2be441d0f53e9eaed2ca7dced90ac978a402aceedb970419/pyvoy-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:650591de51e6187e1dc9d15fa8b0cde9ecd3e059cbc821b574567e33bce60883", size = 487059, upload-time = "2025-12-17T04:14:03.482Z" }, + { url = "https://files.pythonhosted.org/packages/bf/db/61b58aec8f3f9e272973f895f0561f25a36761eee36e9ca41512fe6e0669/pyvoy-0.2.0-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:c001b94e57c5a4da7a49cbab051d502d5768ca98b5f3bb584133faa28a1b9762", size = 605136, upload-time = "2025-12-17T04:14:04.985Z" }, + { url = "https://files.pythonhosted.org/packages/0c/56/d37ad15319f96d4c0cab2b821322a05e6b127c0f1f61fd8024b3676d0a5f/pyvoy-0.2.0-cp311-cp311-manylinux_2_31_aarch64.whl", hash = "sha256:b328de9cf4d11037193245912b7e993d6a08037ab0fc7677ce1dc25b3a037af7", size = 658445, upload-time = "2025-12-17T04:14:06.561Z" }, + { url = "https://files.pythonhosted.org/packages/85/8e/6a0754ebe6f238fdc06c896b7bdadff377995ddb7f99e91edfeab3bbb0d0/pyvoy-0.2.0-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:e72b75427dd20529a6f5b06dbc00e81a4699486e5921a879bf9dfdabb2163661", size = 678072, upload-time = "2025-12-17T04:14:08.166Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/83/b1774abf45865e6f2133fe53ca9beb60926a9cd3fb9ce77971f4c7f0f8a9/pyvoy-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f9cf0868f0996b01857edc940d67a0a4b0ce9e803d37f675f82ac3c27da06628", size = 486493, upload-time = "2025-12-17T04:14:09.912Z" }, + { url = "https://files.pythonhosted.org/packages/75/b4/952ef9a275af9b27a74e8566969c6e4806ca14bdce9c4ca310ee2583c3e8/pyvoy-0.2.0-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:b43633d171b49af0af8a25158a64ed94df241560654897f6aeb80d792307e578", size = 607081, upload-time = "2025-12-17T04:14:11.118Z" }, + { url = "https://files.pythonhosted.org/packages/ed/2d/75314db8b8527e3ab720afa5401017a5c20525f73a1468b6546f6196301a/pyvoy-0.2.0-cp312-cp312-manylinux_2_31_aarch64.whl", hash = "sha256:1f60157c63400f610a56c8c6894846d2c120c16362559f1740e8e078b729cc19", size = 656994, upload-time = "2025-12-17T04:14:12.819Z" }, + { url = "https://files.pythonhosted.org/packages/04/88/01d141df5fa95fcd2cdce902c273fff03609b807042a8184690556337e64/pyvoy-0.2.0-cp312-cp312-manylinux_2_31_x86_64.whl", hash = "sha256:4476301074363aac20d8c32319d50dfc745aee4461b3070e948995e7605afb06", size = 677372, upload-time = "2025-12-17T04:14:14.48Z" }, + { url = "https://files.pythonhosted.org/packages/15/a8/97c4c508632f7895f57da6a2d89515a78c1b7dd920ea0bb2f160a4e65702/pyvoy-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:189879bc6d998134086f9ae187bb9ea301b4032b55630f79956460ab5349c294", size = 483483, upload-time = "2025-12-17T04:14:15.957Z" }, + { url = "https://files.pythonhosted.org/packages/b3/fc/3425bc531ad4072e3507de4318183011a82b73bf7071c5ec829e6b9a5ea1/pyvoy-0.2.0-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:0db70dc687273de2b63c381f3695f248259fbe50e286efdcb5f9ee49efc48536", size = 606265, upload-time = "2025-12-17T04:14:17.23Z" }, + { url = "https://files.pythonhosted.org/packages/da/2f/5043c8ddd5cbcc4099a6eeeaaeefb37da8d52abaa12c092b16135fd8782e/pyvoy-0.2.0-cp313-cp313-manylinux_2_31_aarch64.whl", hash = "sha256:d1c81dcad3d8ef4cf14137436916492d18af6bcf206fb62c37f4e3768492291d", size = 657149, upload-time = "2025-12-17T04:14:18.9Z" }, + { url = "https://files.pythonhosted.org/packages/0c/38/80944408b23dcde98152b915480f1d27b5cdce2d2a9cf1246bc388110ff5/pyvoy-0.2.0-cp313-cp313-manylinux_2_31_x86_64.whl", hash = "sha256:1124d34c8fd366c3371bd902b65a34a33bf1444364c4eef844a3464a260802c2", size = 676780, upload-time = "2025-12-17T04:14:20.19Z" }, + { url = "https://files.pythonhosted.org/packages/81/84/18be34c48a0815cc2081fe96e328c4c177a5d94165bd2c7e36f92bd43321/pyvoy-0.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9cfc147cec5dd7abbb91cb491aaee5d6bdd17dec5c7f87b3094248dcbc5cc74", size = 482931, upload-time = "2025-12-17T04:14:21.722Z" }, + { url = "https://files.pythonhosted.org/packages/dc/77/4fa07a8c560cb55cfd80a8414828f8cbbf0cae041fca58885ea8c4eb45b2/pyvoy-0.2.0-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:6ce897b68c65a9e512df11ed0d41d49e85dd0e6bf7317229dd38e487ab0f96bb", size = 608259, upload-time = "2025-12-17T04:14:23.28Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d5/9686cad0b8205e6741da4e6bed3a102fe1ac7514544d62d311f8d40cbbae/pyvoy-0.2.0-cp314-cp314-manylinux_2_31_aarch64.whl", hash = "sha256:e06f74e67152a194e385ca9afd09e1aa9992b73d7476d5b8f522631332b39faf", size = 657946, upload-time = "2025-12-17T04:14:24.755Z" }, + { url = "https://files.pythonhosted.org/packages/ae/34/b86345c8949ab5975ad8f20697720ad20b0d0730e0d0257acd5cb54616bd/pyvoy-0.2.0-cp314-cp314-manylinux_2_31_x86_64.whl", hash = 
"sha256:724e3b7ee7b9b12e19d2928818d492a19d394091c28fcb07901bf3e0e60a0e45", size = 678112, upload-time = "2025-12-17T04:14:26.097Z" }, + { url = "https://files.pythonhosted.org/packages/1b/8d/eba6f0eb9a67cd19787caad699be76cd1db2782edc50517ceb7b30fb16e1/pyvoy-0.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:8d0b7a3ed2d125154c48e41eb841836f3d2a319f16588f3f2bff220e22c68e92", size = 502813, upload-time = "2025-12-17T04:14:27.629Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8c/fb65edbb853cd9d1363d7fb6f8c1b6e5268d6cd9b536bbae00b47d0e60be/pyvoy-0.2.0-cp314t-cp314t-macosx_15_0_arm64.whl", hash = "sha256:2a7d1272323e5e7d9bfba2e6f612857de742a475f379c785d4f98274c047ac26", size = 603789, upload-time = "2025-12-17T04:14:28.832Z" }, + { url = "https://files.pythonhosted.org/packages/b5/55/af525ab5f07a81baae412178716a40d509f52e10655086d55b9742d2b260/pyvoy-0.2.0-cp314t-cp314t-manylinux_2_31_aarch64.whl", hash = "sha256:6a5e6d57282f116f2660b78a08724ec46dd04d0963ce6e51f560e9bcd38545d4", size = 656164, upload-time = "2025-12-17T04:14:30.092Z" }, + { url = "https://files.pythonhosted.org/packages/16/77/2e8cbde5b25b4c6cff4b3c98f1e2d266a14dea6d6c35d9fff01a637637af/pyvoy-0.2.0-cp314t-cp314t-manylinux_2_31_x86_64.whl", hash = "sha256:2b566f039f606ff580b0d679285b1a583210f5e88178afd8fcf3e45f8ebe1948", size = 673419, upload-time = "2025-12-17T04:14:31.338Z" }, + { url = "https://files.pythonhosted.org/packages/7d/49/5b8f20da5ffb3b1e78cadab5aa99604e0c2fd2571a08805f0d63f74a2519/pyvoy-0.2.0-cp314t-cp314t-win_amd64.whl", hash = "sha256:66dae617d8b81fe1cee62257dc3ea0d12ef79de325f5c716239215611937e636", size = 504595, upload-time = "2025-12-17T04:14:33.048Z" }, ] [[package]] @@ -2306,6 +2454,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498, upload-time = "2024-11-08T15:52:16.132Z" }, ] +[[package]] +name = "winloop" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/bf/a42bcfa0457b8b4db25aad58ce0d9c4c560947200491429396f2e8afe2e2/winloop-0.4.0.tar.gz", hash = "sha256:0b829ad1db0fc602341bdaee905c300134e43e1a2f02d973349f642b562447fd", size = 2599958, upload-time = "2025-12-05T04:20:14.25Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/7a/40cbe7bcc131d6f34cf70928a0266ad4de14629a0819b6f75f70a7727e25/winloop-0.4.0-cp310-cp310-win32.whl", hash = "sha256:ff93562dab77496ace9b489c68e54a6576e8482b84e4c9d8a7371760dc7a7735", size = 553224, upload-time = "2025-12-05T04:19:44.944Z" }, + { url = "https://files.pythonhosted.org/packages/8a/d6/49a07946abaa2db37f70184e2633a20639c0c0c9ffc9559970d3991013ef/winloop-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:3e92de6a3dd59f7c23bda343991d464b124f5d476f7cd46f3d143f9de435f8ad", size = 669251, upload-time = "2025-12-05T04:19:46.289Z" }, + { url = "https://files.pythonhosted.org/packages/08/12/fa4584b3ab80e9cdb3cfdacd297c7d2cec78ea7bad9a89a1936acbc760cd/winloop-0.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:36f16b7905c1f78d018bd954c6e2459e1307a76b176a1fc47761c8e0d3c62c8f", size = 553826, upload-time = "2025-12-05T04:19:47.736Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f7/54690675d4ba14c8ab7db66228006adc7d41e78fbabcac26a9eef12b4e5d/winloop-0.4.0-cp311-cp311-win32.whl", hash = "sha256:f82357eff683ef81809c6ea3f83be38708cdaf8857a0a12f02da963bfd8c1dd6", size 
= 551927, upload-time = "2025-12-05T04:19:49.421Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e8/5e3d186171ed113cd351fdac70eb1eabd64bfbd89d0b914137f69a496481/winloop-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8ca747bf40b2a16bc578ad49060d074ea64d62274c5ea07158038d040353bff", size = 677856, upload-time = "2025-12-05T04:19:51.191Z" }, + { url = "https://files.pythonhosted.org/packages/d9/12/f3db81714a74336c078df7cfb41fdac5c8085523c1e40399d383e01b6083/winloop-0.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:75fecdc2e742d891e001a0971466afba33e5a412707536e8a426b469afe1175c", size = 555826, upload-time = "2025-12-05T04:19:52.825Z" }, + { url = "https://files.pythonhosted.org/packages/c8/a1/2ee78c43854d94c5aa25009a22ebfc3a51191eb5c79d89ccd0cce34acb9b/winloop-0.4.0-cp312-cp312-win32.whl", hash = "sha256:1a05d2ec48d94cea68cb05471830731e8e2ac7167143a332f5f06453928e12b9", size = 556098, upload-time = "2025-12-05T04:19:54.019Z" }, + { url = "https://files.pythonhosted.org/packages/19/37/85ae0e2ef492981e32cb8aac0a757cc16f08e318216ce88e159426295850/winloop-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:a9a159c894bb7e8c23887d59dfe8a79986c4b242b14a87c0a6a71b0a018b032d", size = 671319, upload-time = "2025-12-05T04:19:55.156Z" }, + { url = "https://files.pythonhosted.org/packages/a6/4a/4502bbd59ac553d8c1201a5233f19d4f547e603fe3291a679de676d124fc/winloop-0.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:1bdc994762668964d3f58462b2d0bc15a2140c91b1b0f36c1053d319b478fa90", size = 552791, upload-time = "2025-12-05T04:19:56.734Z" }, + { url = "https://files.pythonhosted.org/packages/f9/a8/c5478e337afab8db18feeec8ab5fb7806d9e2253e607ee5c52811d85166b/winloop-0.4.0-cp313-cp313-win32.whl", hash = "sha256:dfd6cc1dd0d736183584929eb1d9a1fb4d93892bc4dc1d78185c64096ee0c136", size = 556165, upload-time = "2025-12-05T04:19:58.362Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f0/fa94513ead10b48d7e0f8b1c4847bfcf704fc4e55a0f05212cdd35b3303a/winloop-0.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:6c47bfe24ba2538bb8ff91b4d13a60005205cf6828b43329ca6aa896b4f8231d", size = 670742, upload-time = "2025-12-05T04:19:59.501Z" }, + { url = "https://files.pythonhosted.org/packages/bb/e4/4bac595d3ad6310d9918069ff49dfb069c34cea7f7d90f0b32459ae83816/winloop-0.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:79a09090e0c1df4b9fc7e07108997266d1d84c3efc6b38af63ee176fb828bc08", size = 551681, upload-time = "2025-12-05T04:20:00.809Z" }, + { url = "https://files.pythonhosted.org/packages/66/11/445ff2fa8616f3f14a840672d6f4595d642a82e6455807e5214a44ecf098/winloop-0.4.0-cp314-cp314-win32.whl", hash = "sha256:9c0bf270e176586a976ec592e25bf0e7630aa8fe47dff07f2547a227b28298ce", size = 563532, upload-time = "2025-12-05T04:20:01.974Z" }, + { url = "https://files.pythonhosted.org/packages/50/da/f602a163832fbd1c356257a43cf89b06d4701eab9de45d00e0afe5799734/winloop-0.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:2acc2c564e54564316485a85a5b8429e5c06f5cd05dfb0a80acea29423ef9093", size = 683892, upload-time = "2025-12-05T04:20:03.132Z" }, + { url = "https://files.pythonhosted.org/packages/42/91/69bf6c56c1f83753b6c57e1d9aac2735cfff580097216966ba0dbca20ce5/winloop-0.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:4b9c557a4af897de1bef38ffb2dd7e4e41f64c6259c20aca7b473a014e6a938a", size = 570469, upload-time = "2025-12-05T04:20:04.707Z" }, + { url = "https://files.pythonhosted.org/packages/7c/fa/332deb741357be452505f9e5e5efda884c7472713ac5cf79757693527240/winloop-0.4.0-cp314-cp314t-win32.whl", hash = 
"sha256:368de86041fa34ff3571354e41b24ff27e73d44082303b33a3c18b9cf9c6970f", size = 673018, upload-time = "2025-12-05T04:20:06.01Z" }, + { url = "https://files.pythonhosted.org/packages/04/ea/64ed34b113580df8c59a59257cb606cba57a5f0e7e4022e996dde08b1791/winloop-0.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:38cd40f1b7494a61ef60b6f8d2ec3f7f14cbd083a3593b1eef9ae3a17c8f2651", size = 836973, upload-time = "2025-12-05T04:20:07.459Z" }, + { url = "https://files.pythonhosted.org/packages/12/9d/73d03bc2334c1bf088f2199c1346938abc446ead8512fa12081e004e2552/winloop-0.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:d029bcdd8ec66f810510bd10fedc2d0bcafb6423660284eefde2bdce7cfafe20", size = 601006, upload-time = "2025-12-05T04:20:08.737Z" }, +] + [[package]] name = "wsproto" version = "1.2.0"