diff --git a/README.md b/README.md
index 0f5ba15..0fe944f 100644
--- a/README.md
+++ b/README.md
@@ -133,6 +133,9 @@ plugins:
     out: gen
   - local: protoc-gen-connecpy
     out: gen
+    # Optional: Enable the experimental Client Transport API
+    # opt:
+    #   - transport_api=true
 ```

 Then run:
@@ -147,9 +150,16 @@ buf generate
 protoc --python_out=./ --pyi_out=./ --connecpy_out=./ ./haberdasher.proto
 ```

+### Generator Options
+
 By default, naming follows PEP8 conventions. To use Google conventions, matching the output of grpc-python, add `--connecpy_opt=naming=google`.

 By default, imports are generated absolutely based on the proto package name. To use relative imports, add `--connecpy_opt=imports=relative`.

+For experimental Client Transport API support (see the Client Transport API section below), add `--connecpy_opt=transport_api=true`:
+```sh
+protoc --python_out=./ --pyi_out=./ --connecpy_out=./ --connecpy_opt=transport_api=true ./haberdasher.proto
+```
+
 ### Server code (ASGI)

 ```python
@@ -566,6 +576,91 @@ On Windows, Content-Type: application/json, HTTP/2
 curl --http2-prior-knowledge -X POST -H "Content-Type: application/json" -d '{\"inches\": 12}' -v http://localhost:3000/i2y.connecpy.example.Haberdasher/MakeHat
 ```

+## Client Transport API (Experimental)
+
+The Client Transport API provides a protocol-agnostic way to create RPC clients that work with both the Connect and gRPC protocols, so you can switch between protocols without changing your client code.
+
+**Note**: This feature must be explicitly enabled during code generation using the `transport_api=true` option (see Generator Options above).
+
+### Features
+
+- **Protocol Agnostic**: Write client code once, use it with both Connect and gRPC
+- **Type Safety**: Generated Protocol types ensure type-safe client interfaces
+- **Seamless Integration**: Factory functions automatically handle protocol differences
+
+### Usage
+
+When the `transport_api=true` option is enabled, the protoc-gen-connecpy plugin generates Client Transport API support alongside the regular client code:
+
+```python
+# Using Connect transport
+from connecpy.transport.client import ConnectTransportAsync
+from example.haberdasher_connecpy import create_client
+from example.haberdasher_pb2 import Size
+
+async def connect_example():
+    transport = ConnectTransportAsync("http://localhost:3000", proto_json=True)
+    client = create_client(transport)
+
+    hat = await client.make_hat(Size(inches=12))
+    print(f"Got hat: {hat.color}")
+
+# Using gRPC transport (requires grpcio)
+from connecpy.transport.client import GrpcTransportAsync
+
+async def grpc_example():
+    transport = GrpcTransportAsync("localhost:50051")
+    client = create_client(transport)
+
+    hat = await client.make_hat(Size(inches=12))
+    print(f"Got hat: {hat.color}")
+```
+
+### Synchronous API
+
+The Client Transport API also supports synchronous clients:
+
+```python
+from connecpy.transport.client import ConnectTransport, GrpcTransport
+from example.haberdasher_connecpy import create_client_sync
+from example.haberdasher_pb2 import Size
+
+# Connect transport (sync)
+transport = ConnectTransport("http://localhost:3000")
+client = create_client_sync(transport)
+hat = client.make_hat(Size(inches=12))
+
+# gRPC transport (sync)
+transport = GrpcTransport("localhost:50051")
+client = create_client_sync(transport)
+hat = client.make_hat(Size(inches=12))
+```
+
+### Advanced Configuration
+
+Both transports support advanced configuration options:
+
+```python
+# Connect with compression options and a client-wide timeout
+transport = ConnectTransportAsync(
+    "http://localhost:3000",
+    proto_json=True,
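+    # These options mirror the ConnecpyClient constructor arguments; the
+    # transport stores them and the generated create_client factory forwards
+    # them to the underlying client unchanged.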
+    accept_compression=["gzip", "br"],
+    send_compression="gzip",
+    timeout_ms=5000,
+)
+
+# gRPC with TLS
+import grpc
+credentials = grpc.ssl_channel_credentials()
+transport = GrpcTransportAsync(
+    "api.example.com:443",
+    credentials=credentials,
+    options=[("grpc.max_receive_message_length", 10000000)],
+)
+```
+
+**Note**: The Client Transport API is experimental and the interface may change in future versions. For production use, consider using the standard `HaberdasherClient` and `HaberdasherClientSync` classes directly.
+
 ## WSGI Support

 Connecpy provides full WSGI support via the `ConnecpyWSGIApplication`. This synchronous application adapts our service endpoints to the WSGI specification. It reads requests from the WSGI `environ`, processes them, and returns responses using `start_response`. This enables integration with WSGI servers and middleware.
diff --git a/example/example/eliza_connecpy.py b/example/example/eliza_connecpy.py
index cb3bedc..c087fa6 100644
--- a/example/example/eliza_connecpy.py
+++ b/example/example/eliza_connecpy.py
@@ -2,7 +2,7 @@
 # source: example/eliza.proto

 from collections.abc import AsyncIterator, Iterable, Iterator, Mapping
-from typing import Protocol
+from typing import ClassVar, Protocol

 from connecpy.client import ConnecpyClient, ConnecpyClientSync
 from connecpy.code import Code
@@ -21,6 +21,35 @@
 class ElizaService(Protocol):
+    """Service protocol for ElizaService."""
+
+    _service_info: ClassVar[dict] = {
+        "name": "connectrpc.eliza.v1.ElizaService",
+        "methods": {
+            "say": MethodInfo(
+                name="Say",
+                service_name="connectrpc.eliza.v1.ElizaService",
+                input=example_dot_eliza__pb2.SayRequest,
+                output=example_dot_eliza__pb2.SayResponse,
+                idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+            ),
+            "converse": MethodInfo(
+                name="Converse",
+                service_name="connectrpc.eliza.v1.ElizaService",
+                input=example_dot_eliza__pb2.ConverseRequest,
+                output=example_dot_eliza__pb2.ConverseResponse,
+                idempotency_level=IdempotencyLevel.UNKNOWN,
+            ),
+            "introduce": MethodInfo(
+                name="Introduce",
+                service_name="connectrpc.eliza.v1.ElizaService",
+                input=example_dot_eliza__pb2.IntroduceRequest,
+                output=example_dot_eliza__pb2.IntroduceResponse,
+                idempotency_level=IdempotencyLevel.UNKNOWN,
+            ),
+        },
+    }
+
     async def say(
         self, request: example_dot_eliza__pb2.SayRequest, ctx: RequestContext
     ) -> example_dot_eliza__pb2.SayResponse:
@@ -155,6 +184,35 @@ def introduce(
 class ElizaServiceSync(Protocol):
+    """Synchronous service protocol for ElizaService."""
+
+    _service_info: ClassVar[dict] = {
+        "name": "connectrpc.eliza.v1.ElizaService",
+        "methods": {
+            "say": MethodInfo(
+                name="Say",
+                service_name="connectrpc.eliza.v1.ElizaService",
+                input=example_dot_eliza__pb2.SayRequest,
+                output=example_dot_eliza__pb2.SayResponse,
+                idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+            ),
+            "converse": MethodInfo(
+                name="Converse",
+                service_name="connectrpc.eliza.v1.ElizaService",
+                input=example_dot_eliza__pb2.ConverseRequest,
+                output=example_dot_eliza__pb2.ConverseResponse,
+                idempotency_level=IdempotencyLevel.UNKNOWN,
+            ),
+            "introduce": MethodInfo(
+                name="Introduce",
+                service_name="connectrpc.eliza.v1.ElizaService",
+                input=example_dot_eliza__pb2.IntroduceRequest,
+                output=example_dot_eliza__pb2.IntroduceResponse,
+                idempotency_level=IdempotencyLevel.UNKNOWN,
+            ),
+        },
+    }
+
     def say(
         self, request: example_dot_eliza__pb2.SayRequest, ctx: RequestContext
     ) -> example_dot_eliza__pb2.SayResponse:
diff --git a/example/example/eliza_pb2.py b/example/example/eliza_pb2.py
index fba9e93..16c3394 100644 --- a/example/example/eliza_pb2.py +++ b/example/example/eliza_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # NO CHECKED-IN PROTOBUF GENCODE # source: example/eliza.proto -# Protobuf Python Version: 6.32.0 +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -11,9 +11,9 @@ from google.protobuf.internal import builder as _builder _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, - 6, - 32, - 0, + 5, + 29, + 3, '', 'example/eliza.proto' ) @@ -24,7 +24,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x65xample/eliza.proto\x12\x13\x63onnectrpc.eliza.v1\"(\n\nSayRequest\x12\x1a\n\x08sentence\x18\x01 \x01(\tR\x08sentence\")\n\x0bSayResponse\x12\x1a\n\x08sentence\x18\x01 \x01(\tR\x08sentence\"-\n\x0f\x43onverseRequest\x12\x1a\n\x08sentence\x18\x01 \x01(\tR\x08sentence\".\n\x10\x43onverseResponse\x12\x1a\n\x08sentence\x18\x01 \x01(\tR\x08sentence\"&\n\x10IntroduceRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"/\n\x11IntroduceResponse\x12\x1a\n\x08sentence\x18\x01 \x01(\tR\x08sentence2\x9c\x02\n\x0c\x45lizaService\x12M\n\x03Say\x12\x1f.connectrpc.eliza.v1.SayRequest\x1a .connectrpc.eliza.v1.SayResponse\"\x03\x90\x02\x01\x12]\n\x08\x43onverse\x12$.connectrpc.eliza.v1.ConverseRequest\x1a%.connectrpc.eliza.v1.ConverseResponse\"\x00(\x01\x30\x01\x12^\n\tIntroduce\x12%.connectrpc.eliza.v1.IntroduceRequest\x1a&.connectrpc.eliza.v1.IntroduceResponse\"\x00\x30\x01\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x65xample/eliza.proto\x12\x13\x63onnectrpc.eliza.v1\"\x1e\n\nSayRequest\x12\x10\n\x08sentence\x18\x01 \x01(\t\"\x1f\n\x0bSayResponse\x12\x10\n\x08sentence\x18\x01 \x01(\t\"#\n\x0f\x43onverseRequest\x12\x10\n\x08sentence\x18\x01 \x01(\t\"$\n\x10\x43onverseResponse\x12\x10\n\x08sentence\x18\x01 \x01(\t\" \n\x10IntroduceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"%\n\x11IntroduceResponse\x12\x10\n\x08sentence\x18\x01 \x01(\t2\x9c\x02\n\x0c\x45lizaService\x12M\n\x03Say\x12\x1f.connectrpc.eliza.v1.SayRequest\x1a .connectrpc.eliza.v1.SayResponse\"\x03\x90\x02\x01\x12]\n\x08\x43onverse\x12$.connectrpc.eliza.v1.ConverseRequest\x1a%.connectrpc.eliza.v1.ConverseResponse\"\x00(\x01\x30\x01\x12^\n\tIntroduce\x12%.connectrpc.eliza.v1.IntroduceRequest\x1a&.connectrpc.eliza.v1.IntroduceResponse\"\x00\x30\x01\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -34,17 +34,17 @@ _globals['_ELIZASERVICE'].methods_by_name['Say']._loaded_options = None _globals['_ELIZASERVICE'].methods_by_name['Say']._serialized_options = b'\220\002\001' _globals['_SAYREQUEST']._serialized_start=44 - _globals['_SAYREQUEST']._serialized_end=84 - _globals['_SAYRESPONSE']._serialized_start=86 - _globals['_SAYRESPONSE']._serialized_end=127 - _globals['_CONVERSEREQUEST']._serialized_start=129 - _globals['_CONVERSEREQUEST']._serialized_end=174 - _globals['_CONVERSERESPONSE']._serialized_start=176 - _globals['_CONVERSERESPONSE']._serialized_end=222 - _globals['_INTRODUCEREQUEST']._serialized_start=224 - _globals['_INTRODUCEREQUEST']._serialized_end=262 - _globals['_INTRODUCERESPONSE']._serialized_start=264 - _globals['_INTRODUCERESPONSE']._serialized_end=311 - _globals['_ELIZASERVICE']._serialized_start=314 - _globals['_ELIZASERVICE']._serialized_end=598 + _globals['_SAYREQUEST']._serialized_end=74 
+ _globals['_SAYRESPONSE']._serialized_start=76 + _globals['_SAYRESPONSE']._serialized_end=107 + _globals['_CONVERSEREQUEST']._serialized_start=109 + _globals['_CONVERSEREQUEST']._serialized_end=144 + _globals['_CONVERSERESPONSE']._serialized_start=146 + _globals['_CONVERSERESPONSE']._serialized_end=182 + _globals['_INTRODUCEREQUEST']._serialized_start=184 + _globals['_INTRODUCEREQUEST']._serialized_end=216 + _globals['_INTRODUCERESPONSE']._serialized_start=218 + _globals['_INTRODUCERESPONSE']._serialized_end=255 + _globals['_ELIZASERVICE']._serialized_start=258 + _globals['_ELIZASERVICE']._serialized_end=542 # @@protoc_insertion_point(module_scope) diff --git a/example/example/haberdasher_pb2_grpc.py b/example/example/haberdasher_pb2_grpc.py new file mode 100644 index 0000000..4c4c6c4 --- /dev/null +++ b/example/example/haberdasher_pb2_grpc.py @@ -0,0 +1,343 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" + +import grpc +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + +from example import haberdasher_pb2 as example_dot_haberdasher__pb2 + +GRPC_GENERATED_VERSION = "1.74.0" +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f"The grpc package installed is at version {GRPC_VERSION}," + f" but the generated code in example/haberdasher_pb2_grpc.py depends on" + f" grpcio>={GRPC_GENERATED_VERSION}." + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + ) + + +class HaberdasherStub: + """A Haberdasher makes hats for clients.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.MakeHat = channel.unary_unary( + "/i2y.connecpy.example.Haberdasher/MakeHat", + request_serializer=example_dot_haberdasher__pb2.Size.SerializeToString, + response_deserializer=example_dot_haberdasher__pb2.Hat.FromString, + _registered_method=True, + ) + self.MakeFlexibleHat = channel.stream_unary( + "/i2y.connecpy.example.Haberdasher/MakeFlexibleHat", + request_serializer=example_dot_haberdasher__pb2.Size.SerializeToString, + response_deserializer=example_dot_haberdasher__pb2.Hat.FromString, + _registered_method=True, + ) + self.MakeSimilarHats = channel.unary_stream( + "/i2y.connecpy.example.Haberdasher/MakeSimilarHats", + request_serializer=example_dot_haberdasher__pb2.Size.SerializeToString, + response_deserializer=example_dot_haberdasher__pb2.Hat.FromString, + _registered_method=True, + ) + self.MakeVariousHats = channel.stream_stream( + "/i2y.connecpy.example.Haberdasher/MakeVariousHats", + request_serializer=example_dot_haberdasher__pb2.Size.SerializeToString, + response_deserializer=example_dot_haberdasher__pb2.Hat.FromString, + _registered_method=True, + ) + self.ListParts = channel.unary_stream( + "/i2y.connecpy.example.Haberdasher/ListParts", + request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + response_deserializer=example_dot_haberdasher__pb2.Hat.Part.FromString, + _registered_method=True, + ) + self.DoNothing = channel.unary_unary( + "/i2y.connecpy.example.Haberdasher/DoNothing", + request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + _registered_method=True, + ) + + +class HaberdasherServicer: + """A Haberdasher makes hats for clients.""" + + def MakeHat(self, request, context): + """MakeHat produces a hat of mysterious, randomly-selected color!""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def MakeFlexibleHat(self, request_iterator, context): + """MakeFlexibleHats produces a single hat adhering to many sizes.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def MakeSimilarHats(self, request, context): + """MakeSimilarHats produces hats of mysterious, randomly-selected color following a single order!""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def MakeVariousHats(self, request_iterator, context): + """MakeVariousHats produces hats of mysterious, randomly-selected color following many orders!""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListParts(self, request, context): + """ListParts lists available parts for making a hat.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DoNothing(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_HaberdasherServicer_to_server(servicer, server): + rpc_method_handlers = { + "MakeHat": grpc.unary_unary_rpc_method_handler( + 
servicer.MakeHat, + request_deserializer=example_dot_haberdasher__pb2.Size.FromString, + response_serializer=example_dot_haberdasher__pb2.Hat.SerializeToString, + ), + "MakeFlexibleHat": grpc.stream_unary_rpc_method_handler( + servicer.MakeFlexibleHat, + request_deserializer=example_dot_haberdasher__pb2.Size.FromString, + response_serializer=example_dot_haberdasher__pb2.Hat.SerializeToString, + ), + "MakeSimilarHats": grpc.unary_stream_rpc_method_handler( + servicer.MakeSimilarHats, + request_deserializer=example_dot_haberdasher__pb2.Size.FromString, + response_serializer=example_dot_haberdasher__pb2.Hat.SerializeToString, + ), + "MakeVariousHats": grpc.stream_stream_rpc_method_handler( + servicer.MakeVariousHats, + request_deserializer=example_dot_haberdasher__pb2.Size.FromString, + response_serializer=example_dot_haberdasher__pb2.Hat.SerializeToString, + ), + "ListParts": grpc.unary_stream_rpc_method_handler( + servicer.ListParts, + request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + response_serializer=example_dot_haberdasher__pb2.Hat.Part.SerializeToString, + ), + "DoNothing": grpc.unary_unary_rpc_method_handler( + servicer.DoNothing, + request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "i2y.connecpy.example.Haberdasher", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers( + "i2y.connecpy.example.Haberdasher", rpc_method_handlers + ) + + +# This class is part of an EXPERIMENTAL API. +class Haberdasher: + """A Haberdasher makes hats for clients.""" + + @staticmethod + def MakeHat( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/i2y.connecpy.example.Haberdasher/MakeHat", + example_dot_haberdasher__pb2.Size.SerializeToString, + example_dot_haberdasher__pb2.Hat.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) + + @staticmethod + def MakeFlexibleHat( + request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.stream_unary( + request_iterator, + target, + "/i2y.connecpy.example.Haberdasher/MakeFlexibleHat", + example_dot_haberdasher__pb2.Size.SerializeToString, + example_dot_haberdasher__pb2.Hat.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) + + @staticmethod + def MakeSimilarHats( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_stream( + request, + target, + "/i2y.connecpy.example.Haberdasher/MakeSimilarHats", + example_dot_haberdasher__pb2.Size.SerializeToString, + example_dot_haberdasher__pb2.Hat.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) + + 
@staticmethod + def MakeVariousHats( + request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.stream_stream( + request_iterator, + target, + "/i2y.connecpy.example.Haberdasher/MakeVariousHats", + example_dot_haberdasher__pb2.Size.SerializeToString, + example_dot_haberdasher__pb2.Hat.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) + + @staticmethod + def ListParts( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_stream( + request, + target, + "/i2y.connecpy.example.Haberdasher/ListParts", + google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + example_dot_haberdasher__pb2.Hat.Part.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) + + @staticmethod + def DoNothing( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/i2y.connecpy.example.Haberdasher/DoNothing", + google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/protoc-gen-connecpy/generator/config.go b/protoc-gen-connecpy/generator/config.go index c949ae8..7d67d09 100644 --- a/protoc-gen-connecpy/generator/config.go +++ b/protoc-gen-connecpy/generator/config.go @@ -34,6 +34,10 @@ type Config struct { // Imports is how to import dependencies in the generated code. Imports Imports + + // TransportAPI enables generation of experimental Transport API support. + // This includes Protocol types, gRPC wrappers, and factory functions. + TransportAPI bool } func parseConfig(p string) Config { @@ -64,6 +68,13 @@ func parseConfig(p string) Config { case "relative": cfg.Imports = ImportsRelative } + case "transport_api": + switch value { + case "true", "1", "yes": + cfg.TransportAPI = true + case "false", "0", "no": + cfg.TransportAPI = false + } } } return cfg diff --git a/protoc-gen-connecpy/generator/generator.go b/protoc-gen-connecpy/generator/generator.go index b7361c6..d029883 100644 --- a/protoc-gen-connecpy/generator/generator.go +++ b/protoc-gen-connecpy/generator/generator.go @@ -67,9 +67,10 @@ func GenerateConnecpyFile(fd protoreflect.FileDescriptor, conf Config) (*plugin. moduleName := strings.Join(strings.Split(fileNameWithoutSuffix, "/"), ".") vars := ConnecpyTemplateVariables{ - FileName: filename, - ModuleName: moduleName, - Imports: importStatements(fd, conf), + FileName: filename, + ModuleName: moduleName, + Imports: importStatements(fd, conf), + TransportAPI: conf.TransportAPI, } svcs := fd.Services() @@ -83,6 +84,7 @@ func GenerateConnecpyFile(fd protoreflect.FileDescriptor, conf Config) (*plugin. 
} methods := svc.Methods() + hasStreamingMethods := false for j := 0; j < methods.Len(); j++ { method := methods.Get(j) idempotencyLevel := "UNKNOWN" @@ -98,10 +100,13 @@ func GenerateConnecpyFile(fd protoreflect.FileDescriptor, conf Config) (*plugin. endpointType := "unary" if method.IsStreamingClient() && method.IsStreamingServer() { endpointType = "bidi_stream" + hasStreamingMethods = true } else if method.IsStreamingClient() { endpointType = "client_stream" + hasStreamingMethods = true } else if method.IsStreamingServer() { endpointType = "server_stream" + hasStreamingMethods = true } else if idempotencyLevel == "NO_SIDE_EFFECTS" { noSideEffects = true } @@ -122,6 +127,7 @@ func GenerateConnecpyFile(fd protoreflect.FileDescriptor, conf Config) (*plugin. connecpySvc.Methods = append(connecpySvc.Methods, connecpyMethod) } + connecpySvc.HasStreamingMethods = hasStreamingMethods vars.Services = append(vars.Services, connecpySvc) } @@ -215,8 +221,9 @@ func lastPart(imp string) string { func generateImport(pkg string, conf Config, isLocal bool) (string, ImportStatement) { name := moduleName(pkg) imp := ImportStatement{ - Name: name, - Alias: moduleAlias(pkg), + Name: name, + Alias: moduleAlias(pkg), + IsLocal: isLocal, } if isLocal && conf.Imports == ImportsRelative { name = lastPart(name) diff --git a/protoc-gen-connecpy/generator/generator_test.go b/protoc-gen-connecpy/generator/generator_test.go index a494ffa..3c3deca 100644 --- a/protoc-gen-connecpy/generator/generator_test.go +++ b/protoc-gen-connecpy/generator/generator_test.go @@ -86,6 +86,49 @@ func TestGenerateConnecpyFile(t *testing.T) { wantFile: "multi_connecpy.py", wantErr: false, }, + { + name: "service with streaming methods", + input: &descriptorpb.FileDescriptorProto{ + Name: proto.String("stream.proto"), + Package: proto.String("test"), + Service: []*descriptorpb.ServiceDescriptorProto{ + { + Name: proto.String("StreamService"), + Method: []*descriptorpb.MethodDescriptorProto{ + { + Name: proto.String("ServerStream"), + InputType: proto.String(".test.Request"), + OutputType: proto.String(".test.Response"), + ServerStreaming: proto.Bool(true), + }, + { + Name: proto.String("ClientStream"), + InputType: proto.String(".test.Request"), + OutputType: proto.String(".test.Response"), + ClientStreaming: proto.Bool(true), + }, + { + Name: proto.String("BidiStream"), + InputType: proto.String(".test.Request"), + OutputType: proto.String(".test.Response"), + ClientStreaming: proto.Bool(true), + ServerStreaming: proto.Bool(true), + }, + }, + }, + }, + MessageType: []*descriptorpb.DescriptorProto{ + { + Name: proto.String("Request"), + }, + { + Name: proto.String("Response"), + }, + }, + }, + wantFile: "stream_connecpy.py", + wantErr: false, + }, } for _, tt := range tests { @@ -106,9 +149,39 @@ func TestGenerateConnecpyFile(t *testing.T) { } content := got.GetContent() - if !strings.Contains(content, "from collections.abc import AsyncIterator, Iterable, Iterator, Mapping") { + // Check for base imports + if !strings.Contains(content, "from collections.abc import") || !strings.Contains(content, "Iterable") || !strings.Contains(content, "Mapping") { t.Error("Generated code missing required imports") } + + // Check for streaming imports based on whether service has streaming methods + hasStreaming := false + for _, service := range tt.input.GetService() { + for _, method := range service.GetMethod() { + if method.GetClientStreaming() || method.GetServerStreaming() { + hasStreaming = true + break + } + } + } + + if hasStreaming { + // 
Should have AsyncIterator and Iterator for streaming methods + if !strings.Contains(content, "AsyncIterator") { + t.Error("Generated code with streaming methods missing AsyncIterator import") + } + if !strings.Contains(content, "Iterator") { + t.Error("Generated code with streaming methods missing Iterator import") + } + } else { + // Should NOT have AsyncIterator and Iterator for non-streaming methods + if strings.Contains(content, "AsyncIterator") { + t.Error("Generated code without streaming methods should not have AsyncIterator import") + } + if strings.Contains(content, " Iterator,") || strings.Contains(content, "Iterator]") { + t.Error("Generated code without streaming methods should not have Iterator import") + } + } if !strings.Contains(content, "class "+strings.Split(tt.input.GetService()[0].GetName(), ".")[0]) { t.Error("Generated code missing service class") } diff --git a/protoc-gen-connecpy/generator/template.go b/protoc-gen-connecpy/generator/template.go index 456339a..3fd08a9 100644 --- a/protoc-gen-connecpy/generator/template.go +++ b/protoc-gen-connecpy/generator/template.go @@ -6,21 +6,24 @@ type ImportStatement struct { Name string Alias string Relative bool + IsLocal bool // true for local imports (like example.*), false for third-party (like google.*) } type ConnecpyTemplateVariables struct { - FileName string - ModuleName string - Imports []ImportStatement - Services []*ConnecpyService + FileName string + ModuleName string + Imports []ImportStatement + Services []*ConnecpyService + TransportAPI bool // Whether to generate Transport API support } type ConnecpyService struct { - Package string - Name string - FullName string - Comment string - Methods []*ConnecpyMethod + Package string + Name string + FullName string + Comment string + Methods []*ConnecpyMethod + HasStreamingMethods bool // Whether this service has any streaming methods } type ConnecpyMethod struct { @@ -40,29 +43,68 @@ type ConnecpyMethod struct { } // ConnecpyTemplate - Template for connecpy server and client -var ConnecpyTemplate = template.Must(template.New("ConnecpyTemplate").Parse(`# -*- coding: utf-8 -*- -# Generated by https://github.com/i2y/connecpy/v2/protoc-gen-connecpy. DO NOT EDIT! +var ConnecpyTemplate = template.Must(template.New("ConnecpyTemplate").Parse(`# Generated by https://github.com/i2y/connecpy/v2/protoc-gen-connecpy. DO NOT EDIT! # source: {{.FileName}} -{{if .Services}} -from collections.abc import AsyncIterator, Iterable, Iterator, Mapping -from typing import Protocol +{{if .Services}}{{if .TransportAPI}} +from __future__ import annotations + +import importlib +{{end}}{{- $hasStreaming := false}}{{- range .Services}}{{- if .HasStreamingMethods}}{{- $hasStreaming = true}}{{- end}}{{- end}} +from collections.abc import {{if $hasStreaming}}AsyncIterator, {{end}}Iterable{{if $hasStreaming}}, Iterator{{end}}, Mapping{{if .TransportAPI}} # noqa: TC003{{end}} +from typing import {{if .TransportAPI}}TYPE_CHECKING, {{end}}ClassVar, Protocol +{{- range .Imports }} +{{- if not .IsLocal }} + +{{if .Relative}}from . 
import {{.Name}}{{else}}import {{.Name}}{{end}} as {{.Alias}} +{{- end}} +{{- end}} from connecpy.client import ConnecpyClient, ConnecpyClientSync from connecpy.code import Code from connecpy.exceptions import ConnecpyException -from connecpy.interceptor import Interceptor, InterceptorSync +from connecpy.interceptor import Interceptor, InterceptorSync{{if .TransportAPI}} # noqa: TC001{{end}} from connecpy.method import IdempotencyLevel, MethodInfo -from connecpy.request import Headers, RequestContext -from connecpy.server import ConnecpyASGIApplication, ConnecpyWSGIApplication, Endpoint, EndpointSync - +from connecpy.request import Headers, RequestContext{{if .TransportAPI}} # noqa: TC001{{end}} +from connecpy.server import ( + ConnecpyASGIApplication, + ConnecpyWSGIApplication, + Endpoint, + EndpointSync, +) {{- range .Imports }} +{{- if .IsLocal }} + {{if .Relative}}from . import {{.Name}}{{else}}import {{.Name}}{{end}} as {{.Alias}} {{- end}} {{- end}} +{{- end}} +{{- if .TransportAPI}} + +if TYPE_CHECKING: + from connecpy.transport.client.connect import ConnectTransport + from connecpy.transport.client.connect_async import ConnectTransportAsync + from connecpy.transport.client.grpc import GrpcTransport + from connecpy.transport.client.grpc_async import GrpcTransportAsync +{{- end}} {{- range .Services}} -class {{.Name}}(Protocol):{{- range .Methods }} +class {{.Name}}(Protocol): + """Service protocol for {{.Name}}.""" + + _service_info: ClassVar[dict] = { + "name": "{{.FullName}}", + "methods": { {{- range .Methods }} + "{{.PythonName}}": MethodInfo( + name="{{.Name}}", + service_name="{{.ServiceName}}", + input={{.InputType}}, + output={{.OutputType}}, + idempotency_level=IdempotencyLevel.{{.IdempotencyLevel}}, + ),{{- end }} + } + } + {{- range .Methods }} {{if not .ResponseStream }}async {{end}}def {{.PythonName}}(self, request: {{if .RequestStream}}AsyncIterator[{{end}}{{.InputType}}{{if .RequestStream}}]{{end}}, ctx: RequestContext) -> {{if .ResponseStream}}AsyncIterator[{{end}}{{.OutputType}}{{if .ResponseStream}}]{{end}}: raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") {{ end }} @@ -120,7 +162,22 @@ class {{.Name}}Client(ConnecpyClient):{{range .Methods}} ) {{end}}{{- end }} {{range .Services}} -class {{.Name}}Sync(Protocol):{{- range .Methods }} +class {{.Name}}Sync(Protocol): + """Synchronous service protocol for {{.Name}}.""" + + _service_info: ClassVar[dict] = { + "name": "{{.FullName}}", + "methods": { {{- range .Methods }} + "{{.PythonName}}": MethodInfo( + name="{{.Name}}", + service_name="{{.ServiceName}}", + input={{.InputType}}, + output={{.OutputType}}, + idempotency_level=IdempotencyLevel.{{.IdempotencyLevel}}, + ),{{- end }} + } + } + {{- range .Methods }} def {{.PythonName}}(self, request: {{if .RequestStream}}Iterator[{{end}}{{.InputType}}{{if .RequestStream}}]{{end}}, ctx: RequestContext) -> {{if .ResponseStream}}Iterator[{{end}}{{.OutputType}}{{if .ResponseStream}}]{{end}}: raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") {{- end }} @@ -177,4 +234,227 @@ class {{.Name}}ClientSync(ConnecpyClientSync):{{range .Methods}} use_get=use_get, {{- end}} ) -{{end}}{{end}}`)) +{{end}}{{if $.TransportAPI}} + +# Client Protocol types for type-safe transport API +class {{.Name}}ClientProtocol(Protocol): + """Protocol defining the client interface for {{.Name}}.""" + {{- range .Methods}} + {{if not .ResponseStream}}async {{end}}def {{.PythonName}}( + self, + request: {{if .RequestStream}}AsyncIterator[{{end}}{{.InputType}}{{if 
.RequestStream}}]{{end}}, + *, + headers: Mapping[str, str] | None = None, + timeout_ms: int | None = None, + {{- if .NoSideEffects}} + use_get: bool = False, + {{- end}} + ) -> {{if .ResponseStream}}AsyncIterator[{{.OutputType}}]{{else}}{{.OutputType}}{{end}}: ... +{{- end}} + + +class {{.Name}}ClientSyncProtocol(Protocol): + """Protocol defining the synchronous client interface for {{.Name}}.""" + {{- range .Methods}} + def {{.PythonName}}( + self, + request: {{if .RequestStream}}Iterator[{{end}}{{.InputType}}{{if .RequestStream}}]{{end}}, + *, + headers: Mapping[str, str] | None = None, + timeout_ms: int | None = None, + {{- if .NoSideEffects}} + use_get: bool = False, + {{- end}} + ) -> {{if .ResponseStream}}Iterator[{{.OutputType}}]{{else}}{{.OutputType}}{{end}}: ... +{{- end}} + + +class {{.Name}}GrpcWrapper: + """Async gRPC stub wrapper implementing {{.Name}}ClientProtocol.""" + + def __init__(self, stub: object) -> None: + """Initialize with a gRPC async stub.""" + self._stub = stub + {{- range .Methods}} + + {{if not .ResponseStream}}async {{end}}def {{.PythonName}}( + self, + request: {{if .RequestStream}}AsyncIterator[{{end}}{{.InputType}}{{if .RequestStream}}]{{end}}, + *, + headers: Mapping[str, str] | None = None, + timeout_ms: int | None = None, + {{- if .NoSideEffects}} + use_get: bool = False, + {{- end}} + ) -> {{if .ResponseStream}}AsyncIterator[{{.OutputType}}]{{else}}{{.OutputType}}{{end}}: + """Call {{.Name}} via gRPC.""" + metadata = [(k.lower(), v) for k, v in (headers or {}).items()] + timeout = timeout_ms / 1000.0 if timeout_ms else None + {{- if .RequestStream}} + # Client streaming or bidi streaming + {{- if .ResponseStream}} + # Bidi streaming - return the stream directly + return self._stub.{{.Name}}(request, metadata=metadata, timeout=timeout) + {{- else}} + # Client streaming - await the result + return await self._stub.{{.Name}}(request, metadata=metadata, timeout=timeout) + {{- end}} + {{- else}}{{- if .ResponseStream}} + # Server streaming - return the stream directly + return self._stub.{{.Name}}(request, metadata=metadata, timeout=timeout) + {{- else}} + # Unary - await the result + return await self._stub.{{.Name}}(request, metadata=metadata, timeout=timeout) + {{- end}}{{- end}} + {{- end}} + + +class {{.Name}}GrpcWrapperSync: + """Sync gRPC stub wrapper implementing {{.Name}}ClientSyncProtocol.""" + + def __init__(self, stub: object) -> None: + """Initialize with a gRPC sync stub.""" + self._stub = stub + {{- range .Methods}} + + def {{.PythonName}}( + self, + request: {{if .RequestStream}}Iterator[{{end}}{{.InputType}}{{if .RequestStream}}]{{end}}, + *, + headers: Mapping[str, str] | None = None, + timeout_ms: int | None = None, + {{- if .NoSideEffects}} + use_get: bool = False, + {{- end}} + ) -> {{if .ResponseStream}}Iterator[{{.OutputType}}]{{else}}{{.OutputType}}{{end}}: + """Call {{.Name}} via gRPC.""" + metadata = [(k.lower(), v) for k, v in (headers or {}).items()] + timeout = timeout_ms / 1000.0 if timeout_ms else None + return self._stub.{{.Name}}(request, metadata=metadata, timeout=timeout) + {{- end}} + + +def create_client( + transport: ConnectTransportAsync | GrpcTransportAsync, +) -> {{.Name}}ClientProtocol: + """Create an async {{.Name}} client with the specified transport. 
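+
+    The transport type determines the implementation returned: a
+    {{.Name}}Client for Connect, or a {{.Name}}GrpcWrapper around the
+    generated gRPC stub for gRPC.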
+ + Args: + transport: The transport to use (ConnectTransportAsync or GrpcTransportAsync) + + Returns: + A client implementing {{.Name}}ClientProtocol + """ + # Avoid circular imports by importing here + from connecpy.transport.client.connect_async import ConnectTransportAsync # noqa: PLC0415, I001 + from connecpy.transport.client.grpc_async import GrpcTransportAsync # noqa: PLC0415 + + if isinstance(transport, ConnectTransportAsync): + return {{.Name}}Client( + address=transport.address, + proto_json=transport.proto_json, + accept_compression=transport.accept_compression, + send_compression=transport.send_compression, + timeout_ms=transport.timeout_ms, + read_max_bytes=transport.read_max_bytes, + interceptors=transport.interceptors, + session=transport.session, + ) + if isinstance(transport, GrpcTransportAsync): + # Import grpc stub + module_parts = __name__.split(".") + if module_parts[-1].endswith("_connecpy"): + base_name = module_parts[-1][:-9] # Remove "_connecpy" + module_parts[-1] = f"{base_name}_pb2_grpc" + grpc_module_name = ".".join(module_parts) + else: + grpc_module_name = f"{__name__}_pb2_grpc" + + try: + grpc_mod = importlib.import_module(grpc_module_name) + except ImportError as e: + error_msg = ( + f"Failed to import gRPC stub module '{grpc_module_name}'. " + f"Make sure you generated the gRPC stubs using: " + f"python -m grpc_tools.protoc --grpc_python_out=. yourfile.proto" + ) + raise ImportError(error_msg) from e + + try: + stub_class = getattr(grpc_mod, "{{.Name}}Stub") # noqa: B009 + except AttributeError as e: + error_msg = ( + f"Could not find {{.Name}}Stub in '{grpc_module_name}'. " + f"This usually means the proto file was not compiled with gRPC support." + ) + raise AttributeError(error_msg) from e + + stub = stub_class(transport._channel) # noqa: SLF001 + return {{.Name}}GrpcWrapper(stub) + + error_msg = f"Unsupported transport type: {type(transport)}" + raise TypeError(error_msg) + + +def create_client_sync( + transport: ConnectTransport | GrpcTransport, +) -> {{.Name}}ClientSyncProtocol: + """Create a sync {{.Name}} client with the specified transport. + + Args: + transport: The transport to use (ConnectTransport or GrpcTransport) + + Returns: + A client implementing {{.Name}}ClientSyncProtocol + """ + # Avoid circular imports by importing here + from connecpy.transport.client.connect import ConnectTransport # noqa: PLC0415, I001 + from connecpy.transport.client.grpc import GrpcTransport # noqa: PLC0415 + + if isinstance(transport, ConnectTransport): + return {{.Name}}ClientSync( + address=transport.address, + proto_json=transport.proto_json, + accept_compression=transport.accept_compression, + send_compression=transport.send_compression, + timeout_ms=transport.timeout_ms, + read_max_bytes=transport.read_max_bytes, + interceptors=transport.interceptors, + session=transport.session, + ) + if isinstance(transport, GrpcTransport): + # Import grpc stub + module_parts = __name__.split(".") + if module_parts[-1].endswith("_connecpy"): + base_name = module_parts[-1][:-9] # Remove "_connecpy" + module_parts[-1] = f"{base_name}_pb2_grpc" + grpc_module_name = ".".join(module_parts) + else: + grpc_module_name = f"{__name__}_pb2_grpc" + + try: + grpc_mod = importlib.import_module(grpc_module_name) + except ImportError as e: + error_msg = ( + f"Failed to import gRPC stub module '{grpc_module_name}'. " + f"Make sure you generated the gRPC stubs using: " + f"python -m grpc_tools.protoc --grpc_python_out=. 
yourfile.proto" + ) + raise ImportError(error_msg) from e + + try: + stub_class = getattr(grpc_mod, "{{.Name}}Stub") # noqa: B009 + except AttributeError as e: + error_msg = ( + f"Could not find {{.Name}}Stub in '{grpc_module_name}'. " + f"This usually means the proto file was not compiled with gRPC support." + ) + raise AttributeError(error_msg) from e + + stub = stub_class(transport._channel) # noqa: SLF001 + return {{.Name}}GrpcWrapperSync(stub) + + error_msg = f"Unsupported transport type: {type(transport)}" + raise TypeError(error_msg) +{{- end}}{{- end}} +`)) diff --git a/protoc-gen-connecpy/generator/template_test.go b/protoc-gen-connecpy/generator/template_test.go index e4b0f41..77f2e65 100644 --- a/protoc-gen-connecpy/generator/template_test.go +++ b/protoc-gen-connecpy/generator/template_test.go @@ -36,7 +36,7 @@ func TestConnecpyTemplate(t *testing.T) { }, }, contains: []string{ - "from collections.abc import AsyncIterator, Iterable, Iterator, Mapping", + "from collections.abc import Iterable, Mapping", "class TestService(Protocol):", "class TestServiceASGIApplication(ConnecpyASGIApplication):", "def TestMethod", diff --git a/pyproject.toml b/pyproject.toml index b6a12a8..c0df164 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,10 @@ Repository = "https://github.com/i2y/connecpy.git" Issues = "https://github.com/i2y/connecpy/issues" Changelog = "https://github.com/i2y/connecpy/releases" +[project.optional-dependencies] +grpc = ["grpcio>=1.50.0"] +types = ["types-grpcio"] + [tool.uv] dev-dependencies = [ "asgiref", @@ -67,6 +71,7 @@ dev-dependencies = [ "pytest-asyncio>=0.25.2", "pytest-cov", "ruff>=0.9.4", + "types-grpcio", "typing_extensions", "zstandard", ] @@ -206,13 +211,16 @@ extend-ignore = [ "PERF", "D", ] +# Transport API needs to use Any for protocol definitions and has necessary runtime imports +"src/connecpy/transport/*.py" = ["ANN401", "TC001", "TC002", "TC003", "PERF203"] +"test/test_transport*.py" = ["ANN401"] [tool.ruff.lint.isort] split-on-trailing-comma = false [tool.ruff] # Don't run ruff on generated code from external plugins. -extend-exclude = ["*_pb2.py", "*_pb2.pyi"] +extend-exclude = ["*_pb2.py", "*_pb2.pyi", "*_pb2_grpc.py"] [tool.pyright] exclude = [ diff --git a/src/connecpy/transport/__init__.py b/src/connecpy/transport/__init__.py new file mode 100644 index 0000000..8da8529 --- /dev/null +++ b/src/connecpy/transport/__init__.py @@ -0,0 +1 @@ +"""Transport implementations for RPC communication.""" diff --git a/src/connecpy/transport/client/__init__.py b/src/connecpy/transport/client/__init__.py new file mode 100644 index 0000000..8e8cffa --- /dev/null +++ b/src/connecpy/transport/client/__init__.py @@ -0,0 +1,34 @@ +"""Client Transport API for protocol-agnostic RPC clients. + +WARNING: The Client Transport API is experimental and may change in future versions. +Use with caution in production environments. +""" + +import warnings + +from .base import CallOptions, RetryPolicy +from .client import create_client_sync +from .client_async import create_client +from .connect import ConnectTransport +from .connect_async import ConnectTransportAsync +from .grpc import GrpcTransport +from .grpc_async import GrpcTransportAsync + +# Emit a warning when the client_transport module is imported +warnings.warn( + "The Client Transport API is experimental and may change in future versions. 
" + "Use with caution in production environments.", + FutureWarning, + stacklevel=2, +) + +__all__ = [ + "CallOptions", + "ConnectTransport", + "ConnectTransportAsync", + "GrpcTransport", + "GrpcTransportAsync", + "RetryPolicy", + "create_client", + "create_client_sync", +] diff --git a/src/connecpy/transport/client/base.py b/src/connecpy/transport/client/base.py new file mode 100644 index 0000000..ed5c85e --- /dev/null +++ b/src/connecpy/transport/client/base.py @@ -0,0 +1,229 @@ +"""Base classes and protocols for the Transport API.""" + +from __future__ import annotations + +from collections.abc import Iterator +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any, Protocol + +from connecpy.code import Code + +if TYPE_CHECKING: + from collections.abc import AsyncIterator + + from connecpy.interceptor import Interceptor, InterceptorSync + from connecpy.method import MethodInfo + + +class TransportProtocol(Protocol): + """Protocol for transport implementations. + + Transport implementations provide a protocol-agnostic interface for making + RPC calls. Request and response types are protobuf Message instances. + """ + + def unary_unary( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> Any: + """Execute a unary-unary RPC. + + Args: + method: Method information including service and method names + request: The protobuf request message + call_options: Optional call-specific options + + Returns: + The protobuf response message + """ + ... + + def unary_stream( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> Iterator[Any]: + """Execute a unary-stream RPC. + + Args: + method: Method information including service and method names + request: The protobuf request message + call_options: Optional call-specific options + + Returns: + Iterator of protobuf response messages + """ + ... + + def stream_unary( + self, + method: MethodInfo, + stream: Iterator[Any], + call_options: CallOptions | None = None, + ) -> Any: + """Execute a stream-unary RPC. + + Args: + method: Method information including service and method names + stream: Iterator of protobuf request messages + call_options: Optional call-specific options + + Returns: + The protobuf response message + """ + ... + + def stream_stream( + self, + method: MethodInfo, + stream: Iterator[Any], + call_options: CallOptions | None = None, + ) -> Iterator[Any]: + """Execute a stream-stream RPC. + + Args: + method: Method information including service and method names + stream: Iterator of protobuf request messages + call_options: Optional call-specific options + + Returns: + Iterator of protobuf response messages + """ + ... + + def close(self) -> None: + """Close the transport and release resources.""" + ... + + +class AsyncTransportProtocol(Protocol): + """Protocol for async transport implementations. + + Async transport implementations provide a protocol-agnostic interface for making + asynchronous RPC calls. Request and response types are protobuf Message instances. + """ + + async def unary_unary( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> Any: + """Execute a unary-unary RPC asynchronously. + + Args: + method: Method information including service and method names + request: The protobuf request message + call_options: Optional call-specific options + + Returns: + The protobuf response message + """ + ... 
+ + def unary_stream( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> AsyncIterator[Any]: + """Execute a unary-stream RPC asynchronously. + + Args: + method: Method information including service and method names + request: The protobuf request message + call_options: Optional call-specific options + + Returns: + Async iterator of protobuf response messages + """ + ... + + async def stream_unary( + self, + method: MethodInfo, + stream: AsyncIterator[Any], + call_options: CallOptions | None = None, + ) -> Any: + """Execute a stream-unary RPC asynchronously. + + Args: + method: Method information including service and method names + stream: Async iterator of protobuf request messages + call_options: Optional call-specific options + + Returns: + The protobuf response message + """ + ... + + def stream_stream( + self, + method: MethodInfo, + stream: AsyncIterator[Any], + call_options: CallOptions | None = None, + ) -> AsyncIterator[Any]: + """Execute a stream-stream RPC asynchronously. + + Args: + method: Method information including service and method names + stream: Async iterator of protobuf request messages + call_options: Optional call-specific options + + Returns: + Async iterator of protobuf response messages + """ + ... + + async def close(self) -> None: + """Close the transport and release resources.""" + ... + + +@dataclass +class RetryPolicy: + """Configuration for automatic retry behavior. + + Attributes: + max_attempts: Maximum number of attempts (including initial) + initial_backoff_ms: Initial backoff in milliseconds + max_backoff_ms: Maximum backoff in milliseconds + backoff_multiplier: Multiplier for exponential backoff + retryable_codes: List of error codes that trigger retry + """ + + max_attempts: int = 3 + initial_backoff_ms: int = 100 + max_backoff_ms: int = 5000 + backoff_multiplier: float = 2.0 + retryable_codes: list[Code] | None = None + + def __post_init__(self) -> None: + """Set default retryable codes if not provided.""" + if self.retryable_codes is None: + self.retryable_codes = [Code.UNAVAILABLE, Code.DEADLINE_EXCEEDED] + + +@dataclass +class TransportOptions: + """Options for transport configuration. + + Attributes: + timeout_ms: Default timeout in milliseconds + retry_policy: Default retry policy + interceptors: List of interceptors + compression: Compression algorithm to use + headers: Additional headers to include + """ + + timeout_ms: int | None = None + retry_policy: RetryPolicy | None = None + interceptors: list[Interceptor | InterceptorSync] = field(default_factory=list) + compression: str | None = None + headers: dict[str, str] = field(default_factory=dict) + + +@dataclass +class CallOptions: + """Options for individual RPC calls. 
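+
+    Values set here override the corresponding transport-level defaults for a
+    single call.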
+ + Attributes: + timeout_ms: Timeout for this call (overrides transport default) + retry_policy: Retry policy for this call (overrides transport default) + headers: Additional headers for this call + """ + + timeout_ms: int | None = None + retry_policy: RetryPolicy | None = None + headers: dict[str, str] = field(default_factory=dict) diff --git a/src/connecpy/transport/client/client.py b/src/connecpy/transport/client/client.py new file mode 100644 index 0000000..e4194be --- /dev/null +++ b/src/connecpy/transport/client/client.py @@ -0,0 +1,180 @@ +"""Unified client creation for Connect and gRPC protocols.""" + +from __future__ import annotations + +import importlib +from typing import TYPE_CHECKING, Any, Protocol + +from .base import CallOptions +from .connect import ConnectTransport +from .grpc import GrpcTransport + +if TYPE_CHECKING: + from collections.abc import Iterator + + from connecpy.method import MethodInfo + + +class Transport(Protocol): + """Protocol for transport implementations.""" + + def unary_unary( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> Any: ... + + def unary_stream( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> Iterator[Any]: ... + + def stream_unary( + self, + method: MethodInfo, + stream: Iterator[Any], + call_options: CallOptions | None = None, + ) -> Any: ... + + def stream_stream( + self, + method: MethodInfo, + stream: Iterator[Any], + call_options: CallOptions | None = None, + ) -> Iterator[Any]: ... + + def close(self) -> None: ... + + +def create_client_sync( + service_class: type[Any], transport: Transport | ConnectTransport | GrpcTransport +) -> Any: + """Create a synchronous client for the given service using the specified transport. 
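+
+    The service class is only inspected for its metadata (its module and name
+    are used to locate the generated client, wrapper, and stub classes); it is
+    never instantiated.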
+
+    Args:
+        service_class: The service class containing metadata (e.g., Haberdasher)
+        transport: The transport to use (Connect or gRPC)
+
+    Returns:
+        A client instance appropriate for the transport type
+
+    Example:
+        ```python
+        # For Connect protocol
+        from connecpy.transport.client import ConnectTransport, create_client_sync
+        from example.haberdasher_connecpy import Haberdasher
+
+        connect_transport = ConnectTransport("http://localhost:3000")
+        client = create_client_sync(Haberdasher, connect_transport)
+        # Returns HaberdasherClientSync instance
+
+        # For gRPC protocol
+        from connecpy.transport.client import GrpcTransport, create_client_sync
+
+        grpc_transport = GrpcTransport("localhost:50051")
+        client = create_client_sync(Haberdasher, grpc_transport)
+        # Returns HaberdasherGrpcWrapperSync wrapping HaberdasherStub
+        ```
+    """
+    if isinstance(transport, ConnectTransport):
+        # For Connect transport, return the existing ConnecpyClientSync-based client.
+        # The generated client class lives in the same module as the service
+        # protocol and is looked up by name.
+
+        # Handle both Haberdasher and HaberdasherSync class names
+        base_name = service_class.__name__
+        base_name = base_name.removesuffix("Sync")  # Remove "Sync" suffix
+
+        client_class_name = f"{base_name}ClientSync"
+        module = service_class.__module__
+
+        # Import the client class dynamically
+        mod = importlib.import_module(module)
+        client_class = getattr(mod, client_class_name)
+
+        # Create the client with the transport's parameters
+        return client_class(
+            address=transport.address,
+            proto_json=transport.proto_json,
+            accept_compression=transport.accept_compression,
+            send_compression=transport.send_compression,
+            timeout_ms=transport.timeout_ms,
+            read_max_bytes=transport.read_max_bytes,
+            interceptors=transport.interceptors,
+            session=transport.session,
+        )
+
+    if isinstance(transport, GrpcTransport):
+        # For gRPC transport, use the generated static wrapper
+        # Handle both Haberdasher and HaberdasherSync class names
+        base_name = service_class.__name__
+        base_name = base_name.removesuffix("Sync")  # Remove "Sync" suffix
+
+        wrapper_class_name = f"{base_name}GrpcWrapperSync"
+        stub_class_name = f"{base_name}Stub"
+        module = service_class.__module__
+
+        # Import the wrapper class from the same module as the service
+        mod = importlib.import_module(module)
+        wrapper_class = getattr(mod, wrapper_class_name)
+
+        # Try to import the gRPC stub from the _pb2_grpc module
+        module_parts = module.split(".")
+        if module_parts[-1].endswith("_connecpy"):
+            # Replace _connecpy with _pb2_grpc
+            base_name_without_suffix = module_parts[-1][:-9]  # Remove "_connecpy"
+            module_parts[-1] = f"{base_name_without_suffix}_pb2_grpc"
+            grpc_module_name = ".".join(module_parts)
+        else:
+            # Fallback: try adding _pb2_grpc
+            base_module = module.rsplit(".", 1)[0]
+            grpc_module_name = f"{base_module}_pb2_grpc"
+
+        try:
+            grpc_mod = importlib.import_module(grpc_module_name)
+            stub_class = getattr(grpc_mod, stub_class_name)
+
+            # Create the stub with the transport's channel
+            stub = stub_class(transport._channel)  # noqa: SLF001
+
+            # Create and return the wrapper
+            return wrapper_class(stub)  # type: ignore[return-value]
+        except (ImportError, AttributeError) as e:
+            msg = (
+                f"Could not import gRPC stub {stub_class_name} from {grpc_module_name}. "
+                f"Make sure the proto file was compiled with grpc_tools: {e}"
+            )
+            raise ImportError(msg) from e
+
+    else:
+        # Generic transport with Transport protocol
+        # Create a dynamic client that uses the transport methods
+        class DynamicClient:
+            def __init__(self, transport: Transport, service_info: Any) -> None:
+                self._transport = transport
+                self._service_info = service_info
+
+                # Create methods dynamically; _service_info may be the dict
+                # emitted by the generator or an object with a "methods" attribute
+                methods = (
+                    service_info.get("methods", {})
+                    if isinstance(service_info, dict)
+                    else getattr(service_info, "methods", {})
+                )
+                for method_name, method_info in methods.items():
+                    method = self._create_method(method_info)
+                    setattr(self, method_name, method)
+
+            def _create_method(self, method_info: MethodInfo) -> Any:
+                def method_impl(
+                    request: Any,
+                    *,
+                    headers: dict[str, str] | None = None,
+                    timeout_ms: int | None = None,
+                ) -> Any:
+                    call_options = CallOptions(
+                        headers=headers or {}, timeout_ms=timeout_ms
+                    )
+
+                    # Determine the RPC type and call appropriate transport method
+                    # This would need to be determined from method_info
+                    # For now, assume unary-unary
+                    return self._transport.unary_unary(
+                        method_info, request, call_options
+                    )
+
+                return method_impl
+
+        service_info = getattr(service_class, "_service_info", None)
+        return DynamicClient(transport, service_info)  # type: ignore[arg-type]
diff --git a/src/connecpy/transport/client/client_async.py b/src/connecpy/transport/client/client_async.py
new file mode 100644
index 0000000..744b4ca
--- /dev/null
+++ b/src/connecpy/transport/client/client_async.py
@@ -0,0 +1,186 @@
+"""Asynchronous client creation for Connect and gRPC protocols."""
+
+from __future__ import annotations
+
+import importlib
+from typing import TYPE_CHECKING, Any, Protocol
+
+from .base import CallOptions
+from .connect_async import ConnectTransportAsync
+from .grpc_async import GrpcTransportAsync
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncIterator
+
+    from connecpy.method import MethodInfo
+
+
+class TransportAsync(Protocol):
+    """Protocol for async transport implementations."""
+
+    async def unary_unary(
+        self, method: MethodInfo, request: Any, call_options: CallOptions | None = None
+    ) -> Any: ...
+
+    # The streaming methods are plain defs returning an AsyncIterator (the
+    # async-generator typing convention), matching AsyncTransportProtocol in base.py
+    def unary_stream(
+        self, method: MethodInfo, request: Any, call_options: CallOptions | None = None
+    ) -> AsyncIterator[Any]: ...
+
+    async def stream_unary(
+        self,
+        method: MethodInfo,
+        stream: AsyncIterator[Any],
+        call_options: CallOptions | None = None,
+    ) -> Any: ...
+
+    def stream_stream(
+        self,
+        method: MethodInfo,
+        stream: AsyncIterator[Any],
+        call_options: CallOptions | None = None,
+    ) -> AsyncIterator[Any]: ...
+
+    async def close(self) -> None: ...
+
+
+def create_client(
+    service_class: type[Any],
+    transport: TransportAsync | ConnectTransportAsync | GrpcTransportAsync,
+) -> Any:
+    """Create an asynchronous client for the given service using the specified transport.
diff --git a/src/connecpy/transport/client/client_async.py b/src/connecpy/transport/client/client_async.py
new file mode 100644
index 0000000..744b4ca
--- /dev/null
+++ b/src/connecpy/transport/client/client_async.py
@@ -0,0 +1,186 @@
+"""Asynchronous client creation for Connect and gRPC protocols."""
+
+from __future__ import annotations
+
+import importlib
+from typing import TYPE_CHECKING, Any, Protocol
+
+from .base import CallOptions
+from .connect_async import ConnectTransportAsync
+from .grpc_async import GrpcTransportAsync
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncIterator
+
+    from connecpy.method import MethodInfo
+
+
+class TransportAsync(Protocol):
+    """Protocol for async transport implementations."""
+
+    async def unary_unary(
+        self, method: MethodInfo, request: Any, call_options: CallOptions | None = None
+    ) -> Any: ...
+
+    async def unary_stream(
+        self, method: MethodInfo, request: Any, call_options: CallOptions | None = None
+    ) -> AsyncIterator[Any]: ...
+
+    async def stream_unary(
+        self,
+        method: MethodInfo,
+        stream: AsyncIterator[Any],
+        call_options: CallOptions | None = None,
+    ) -> Any: ...
+
+    async def stream_stream(
+        self,
+        method: MethodInfo,
+        stream: AsyncIterator[Any],
+        call_options: CallOptions | None = None,
+    ) -> AsyncIterator[Any]: ...
+
+    async def close(self) -> None: ...
+
+
+def create_client(
+    service_class: type[Any],
+    transport: TransportAsync | ConnectTransportAsync | GrpcTransportAsync,
+) -> Any:
+    """Create an asynchronous client for the given service using the specified transport.
+
+    Args:
+        service_class: The service class containing metadata (e.g., Haberdasher)
+        transport: The async transport to use (Connect or gRPC)
+
+    Returns:
+        An async client instance appropriate for the transport type
+
+    Example:
+        ```python
+        # For Connect protocol
+        from connecpy.transport import ConnectTransportAsync, create_client
+        from example.haberdasher_connecpy import Haberdasher
+
+        connect_transport = ConnectTransportAsync("http://localhost:3000")
+        client = create_client(Haberdasher, connect_transport)
+        # Returns HaberdasherClient instance
+
+        # For gRPC protocol with async stub
+        from connecpy.transport import GrpcTransportAsync, create_client
+
+        grpc_transport = GrpcTransportAsync("localhost:50051")
+        client = create_client(Haberdasher, grpc_transport)
+        # Returns GrpcClientWrapperAsync wrapping async gRPC stub
+        ```
+    """
+    if isinstance(transport, ConnectTransportAsync):
+        # For Connect transport, return the existing ConnecpyClient-based client
+        # Handle both Haberdasher and HaberdasherSync class names
+        base_name = service_class.__name__
+        base_name = base_name.removesuffix("Sync")
+
+        client_class_name = (
+            f"{base_name}Client"  # Async client doesn't have Sync suffix
+        )
+        module = service_class.__module__
+
+        # Import the client class dynamically
+        mod = importlib.import_module(module)
+        client_class = getattr(mod, client_class_name)
+
+        # Create the client with the transport's parameters
+        return client_class(
+            address=transport.address,
+            proto_json=transport.proto_json,
+            accept_compression=transport.accept_compression,
+            send_compression=transport.send_compression,
+            timeout_ms=transport.timeout_ms,
+            read_max_bytes=transport.read_max_bytes,
+            interceptors=transport.interceptors,
+            session=transport.session,
+        )
+
+    if isinstance(transport, GrpcTransportAsync):
+        # For gRPC transport, use the generated static wrapper
+        # Handle both Haberdasher and HaberdasherSync class names
+        base_name = service_class.__name__
+        base_name = base_name.removesuffix("Sync")
+
+        wrapper_class_name = (
+            f"{base_name}GrpcWrapper"  # Async wrapper doesn't have Sync suffix
+        )
+        stub_class_name = f"{base_name}Stub"
+        module = service_class.__module__
+
+        # Import the wrapper class from the same module as the service
+        mod = importlib.import_module(module)
+        wrapper_class = getattr(mod, wrapper_class_name)
+
+        # Try to import the gRPC stub from the _pb2_grpc module
+        module_parts = module.split(".")
+        if module_parts[-1].endswith("_connecpy"):
+            # Replace _connecpy with _pb2_grpc
+            base_name_without_suffix = module_parts[-1][:-9]  # Remove "_connecpy"
+            module_parts[-1] = f"{base_name_without_suffix}_pb2_grpc"
+            grpc_module_name = ".".join(module_parts)
+        else:
+            # Fallback: try adding _pb2_grpc
+            base_module = module.rsplit(".", 1)[0]
+            grpc_module_name = f"{base_module}_pb2_grpc"
+
+        try:
+            grpc_mod = importlib.import_module(grpc_module_name)
+            stub_class = getattr(grpc_mod, stub_class_name)
+
+            # Create the stub with the transport's channel
+            stub = stub_class(transport._channel)  # noqa: SLF001
+
+            # Create and return the wrapper
+            return wrapper_class(stub)  # type: ignore[return-value]
+        except (ImportError, AttributeError) as e:
+            msg = (
+                f"Could not import gRPC stub {stub_class_name} from {grpc_module_name}. "
" + f"Make sure the proto file was compiled with grpc_tools: {e}" + ) + raise ImportError(msg) from e + + else: + # Generic transport with TransportAsync protocol + class DynamicAsyncClient: + def __init__(self, transport: TransportAsync, service_info: Any) -> None: + self._transport = transport + self._service_info = service_info + + # Create methods dynamically + methods = ( + service_info.get("methods", {}) + if isinstance(service_info, dict) + else getattr(service_info, "methods", {}) + ) + for method_name, method_info in methods.items(): + method = self._create_method(method_info) + setattr(self, method_name, method) + + def _create_method(self, method_info: MethodInfo) -> Any: + async def method_impl( + request: Any, + *, + headers: dict[str, str] | None = None, + timeout_ms: int | None = None, + ) -> Any: + call_options = CallOptions( + headers=headers or {}, timeout_ms=timeout_ms + ) + + # Determine the RPC type and call appropriate transport method + # This would need to be determined from method_info + # For now, assume unary-unary + return await self._transport.unary_unary( + method_info, request, call_options + ) + + return method_impl + + service_info = getattr(service_class, "_service_info", None) + return DynamicAsyncClient(transport, service_info) # type: ignore[arg-type] diff --git a/src/connecpy/transport/client/connect.py b/src/connecpy/transport/client/connect.py new file mode 100644 index 0000000..1611765 --- /dev/null +++ b/src/connecpy/transport/client/connect.py @@ -0,0 +1,286 @@ +"""Connect protocol transport implementation.""" + +from __future__ import annotations + +import time +import types +from collections.abc import Iterable, Iterator +from typing import TYPE_CHECKING, Any + +import httpx +from typing_extensions import Self + +from connecpy._client_sync import ConnecpyClientSync +from connecpy.code import Code +from connecpy.exceptions import ConnecpyException +from connecpy.interceptor import InterceptorSync + +from .base import CallOptions, RetryPolicy + +if TYPE_CHECKING: + from connecpy.method import MethodInfo + + +class ConnectTransport: + """Transport implementation using the Connect protocol. + + This transport wraps the existing ConnecpyClientSync to provide + a protocol-agnostic interface. It accepts all the same parameters + as ConnecpyClientSync for full compatibility. + """ + + def __init__( + self, + address: str, + *, + proto_json: bool = False, + accept_compression: Iterable[str] | None = None, + send_compression: str | None = None, + timeout_ms: int | None = None, + read_max_bytes: int | None = None, + interceptors: Iterable[InterceptorSync] = (), + session: httpx.Client | None = None, + ) -> None: + """Initialize the Connect transport with all ConnecpyClientSync parameters. 
diff --git a/src/connecpy/transport/client/connect.py b/src/connecpy/transport/client/connect.py
new file mode 100644
index 0000000..1611765
--- /dev/null
+++ b/src/connecpy/transport/client/connect.py
@@ -0,0 +1,286 @@
+"""Connect protocol transport implementation."""
+
+from __future__ import annotations
+
+import time
+import types
+from collections.abc import Iterable, Iterator
+from typing import TYPE_CHECKING, Any
+
+import httpx
+from typing_extensions import Self
+
+from connecpy._client_sync import ConnecpyClientSync
+from connecpy.code import Code
+from connecpy.exceptions import ConnecpyException
+from connecpy.interceptor import InterceptorSync
+
+from .base import CallOptions, RetryPolicy
+
+if TYPE_CHECKING:
+    from connecpy.method import MethodInfo
+
+
+class ConnectTransport:
+    """Transport implementation using the Connect protocol.
+
+    This transport wraps the existing ConnecpyClientSync to provide
+    a protocol-agnostic interface. It accepts all the same parameters
+    as ConnecpyClientSync for full compatibility.
+    """
+
+    def __init__(
+        self,
+        address: str,
+        *,
+        proto_json: bool = False,
+        accept_compression: Iterable[str] | None = None,
+        send_compression: str | None = None,
+        timeout_ms: int | None = None,
+        read_max_bytes: int | None = None,
+        interceptors: Iterable[InterceptorSync] = (),
+        session: httpx.Client | None = None,
+    ) -> None:
+        """Initialize the Connect transport with all ConnecpyClientSync parameters.
+
+        Args:
+            address: The address of the server to connect to, including scheme
+                (e.g., "http://localhost:3000" or "https://api.example.com")
+            proto_json: Whether to use JSON for the protocol
+            accept_compression: A list of compression algorithms to accept from the server
+            send_compression: The compression algorithm to use for sending requests
+            timeout_ms: The timeout for requests in milliseconds
+            read_max_bytes: The maximum number of bytes to read from the response
+            interceptors: A list of interceptors to apply to requests
+            session: An httpx Client to use for requests (useful for custom TLS config)
+        """
+        self._address = address
+        self._client_kwargs = {
+            "proto_json": proto_json,
+            "accept_compression": accept_compression,
+            "send_compression": send_compression,
+            "timeout_ms": timeout_ms,
+            "read_max_bytes": read_max_bytes,
+            "interceptors": interceptors,
+            "session": session,
+        }
+
+        # Create the underlying client
+        self._client = ConnecpyClientSync(address, **self._client_kwargs)
+
+        # Store for later use in create_client
+        self.address = address
+        self.proto_json = proto_json
+        self.accept_compression = accept_compression
+        self.send_compression = send_compression
+        self.timeout_ms = timeout_ms
+        self.read_max_bytes = read_max_bytes
+        self.interceptors = interceptors
+        self.session = session
+
+    def unary_unary(
+        self, method: MethodInfo, request: Any, call_options: CallOptions | None = None
+    ) -> Any:
+        """Execute a unary-unary RPC with optional retry."""
+        call_options = call_options or CallOptions()
+
+        def execute() -> Any:
+            return self._call_unary(method, request, call_options)
+
+        if call_options.retry_policy:
+            return self._execute_with_retry(execute, call_options.retry_policy)
+        return execute()
+
+    def unary_stream(
+        self, method: MethodInfo, request: Any, call_options: CallOptions | None = None
+    ) -> Iterator[Any]:
+        """Execute a unary-stream RPC."""
+        call_options = call_options or CallOptions()
+        return self._call_server_stream(method, request, call_options)
+
+    def stream_unary(
+        self,
+        method: MethodInfo,
+        stream: Iterator[Any],
+        call_options: CallOptions | None = None,
+    ) -> Any:
+        """Execute a stream-unary RPC with optional retry."""
+        call_options = call_options or CallOptions()
+
+        def execute() -> Any:
+            return self._call_client_stream(method, stream, call_options)
+
+        if call_options.retry_policy:
+            return self._execute_with_retry(execute, call_options.retry_policy)
+        return execute()
+
+    def stream_stream(
+        self,
+        method: MethodInfo,
+        stream: Iterator[Any],
+        call_options: CallOptions | None = None,
+    ) -> Iterator[Any]:
+        """Execute a stream-stream RPC."""
+        call_options = call_options or CallOptions()
+        return self._call_bidi_stream(method, stream, call_options)
+
+    def close(self) -> None:
+        """Close the underlying client."""
+        self._client.close()
+
+    def __enter__(self) -> Self:
+        """Enter the context manager."""
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: types.TracebackType | None,
+    ) -> None:
+        """Exit the context manager and close resources."""
+        self.close()
+
+    def _merge_options(self, call_options: CallOptions | None) -> CallOptions:
+        """Merge call options with transport defaults."""
+        if not call_options:
+            return CallOptions(
+                timeout_ms=self.timeout_ms, retry_policy=None, headers={}
+            )
+
+        return CallOptions(
+            timeout_ms=call_options.timeout_ms or self.timeout_ms,
+            retry_policy=call_options.retry_policy,
+            headers=call_options.headers.copy(),
+
) + + def _execute_with_retry(self, func: Any, retry_policy: RetryPolicy) -> Any: + """Execute a function with retry logic.""" + attempt = 0 + backoff_ms = retry_policy.initial_backoff_ms + + while attempt < retry_policy.max_attempts: + try: + return func() + except ConnecpyException as e: + # Check if the error is retryable + if ( + retry_policy.retryable_codes is None + or e.code not in retry_policy.retryable_codes + ): + raise + + # Check if we've exhausted retries + if attempt >= retry_policy.max_attempts - 1: + raise + + # Wait before retry with exponential backoff + time.sleep(backoff_ms / 1000.0) + backoff_ms = min( + int(backoff_ms * retry_policy.backoff_multiplier), + retry_policy.max_backoff_ms, + ) + attempt += 1 + + # Should never reach here + msg = "Retry loop exited unexpectedly" + raise RuntimeError(msg) + + def _validate_and_get_timeout(self, options: CallOptions) -> int | None: + """Validate timeout value and return the effective timeout. + + Args: + options: Call options containing timeout settings + + Returns: + The effective timeout in milliseconds, or None for infinite timeout + + Raises: + ValueError: If timeout is invalid (negative or too large) + """ + timeout_ms = ( + options.timeout_ms if options.timeout_ms is not None else self.timeout_ms + ) + if timeout_ms is not None: + if timeout_ms <= 0: + msg = f"Timeout must be positive, got {timeout_ms}ms" + raise ValueError(msg) + if timeout_ms > 8640000000: # 100 days max (protocol supports 100+ days) + msg = f"Timeout too large ({timeout_ms}ms), max is 100 days (8640000000ms)" + raise ValueError(msg) + return timeout_ms + + def _handle_timeout_error(self, e: httpx.TimeoutException) -> None: + """Convert HTTP timeout exception to ConnecpyException. + + Args: + e: The HTTP timeout exception + + Raises: + ConnecpyException: Always raises with DEADLINE_EXCEEDED code + """ + raise ConnecpyException(Code.DEADLINE_EXCEEDED, f"Request timeout: {e}") from e + + def _call_unary( + self, method: MethodInfo, request: Any, options: CallOptions + ) -> Any: + """Internal method to call unary RPC through the client.""" + timeout_ms = self._validate_and_get_timeout(options) + try: + return self._client.execute_unary( + request=request, + method=method, + headers=options.headers, + timeout_ms=timeout_ms, + ) + except httpx.TimeoutException as e: + self._handle_timeout_error(e) + raise # Unreachable, but satisfies type checker + + def _call_server_stream( + self, method: MethodInfo, request: Any, options: CallOptions + ) -> Iterator[Any]: + """Internal method to call server streaming RPC.""" + timeout_ms = self._validate_and_get_timeout(options) + try: + return self._client.execute_server_stream( + request=request, + method=method, + headers=options.headers, + timeout_ms=timeout_ms, + ) + except httpx.TimeoutException as e: + self._handle_timeout_error(e) + raise # Unreachable, but satisfies type checker + + def _call_client_stream( + self, method: MethodInfo, stream: Iterator[Any], options: CallOptions + ) -> Any: + """Internal method to call client streaming RPC.""" + timeout_ms = self._validate_and_get_timeout(options) + try: + return self._client.execute_client_stream( + request=stream, + method=method, + headers=options.headers, + timeout_ms=timeout_ms, + ) + except httpx.TimeoutException as e: + self._handle_timeout_error(e) + raise # Unreachable, but satisfies type checker + + def _call_bidi_stream( + self, method: MethodInfo, stream: Iterator[Any], options: CallOptions + ) -> Iterator[Any]: + """Internal method to call 
bidirectional streaming RPC.""" + timeout_ms = self._validate_and_get_timeout(options) + try: + return self._client.execute_bidi_stream( + request=stream, + method=method, + headers=options.headers, + timeout_ms=timeout_ms, + ) + except httpx.TimeoutException as e: + self._handle_timeout_error(e) + raise # Unreachable, but satisfies type checker diff --git a/src/connecpy/transport/client/connect_async.py b/src/connecpy/transport/client/connect_async.py new file mode 100644 index 0000000..a06eb56 --- /dev/null +++ b/src/connecpy/transport/client/connect_async.py @@ -0,0 +1,273 @@ +"""Async Connect protocol transport implementation.""" + +from __future__ import annotations + +import asyncio +import types +from collections.abc import AsyncIterator, Iterable +from typing import TYPE_CHECKING, Any + +import httpx +from typing_extensions import Self + +from connecpy.client import ConnecpyClient +from connecpy.code import Code +from connecpy.exceptions import ConnecpyException +from connecpy.interceptor import Interceptor + +from .base import CallOptions, RetryPolicy + +if TYPE_CHECKING: + from connecpy.method import MethodInfo + + +class ConnectTransportAsync: + """Async transport implementation using the Connect protocol. + + This transport wraps the existing ConnecpyClient to provide + a protocol-agnostic interface. It accepts all the same parameters + as ConnecpyClient for full compatibility. + """ + + def __init__( + self, + address: str, + *, + proto_json: bool = False, + accept_compression: Iterable[str] | None = None, + send_compression: str | None = None, + timeout_ms: int | None = None, + read_max_bytes: int | None = None, + interceptors: Iterable[Interceptor] = (), + session: httpx.AsyncClient | None = None, + ) -> None: + """Initialize the async Connect transport with all ConnecpyClient parameters. 
+ + Args: + address: The address of the server to connect to, including scheme + (e.g., "http://localhost:3000" or "https://api.example.com") + proto_json: Whether to use JSON for the protocol + accept_compression: A list of compression algorithms to accept from the server + send_compression: The compression algorithm to use for sending requests + timeout_ms: The timeout for requests in milliseconds + read_max_bytes: The maximum number of bytes to read from the response + interceptors: A list of interceptors to apply to requests + session: An httpx AsyncClient to use for requests (useful for custom TLS config) + """ + self._address = address + self._client_kwargs = { + "proto_json": proto_json, + "accept_compression": accept_compression, + "send_compression": send_compression, + "timeout_ms": timeout_ms, + "read_max_bytes": read_max_bytes, + "interceptors": interceptors, + "session": session, + } + + # Create the underlying async client + self._client = ConnecpyClient(address, **self._client_kwargs) + + # Store for later use in create_client + self.address = address + self.proto_json = proto_json + self.accept_compression = accept_compression + self.send_compression = send_compression + self.timeout_ms = timeout_ms + self.read_max_bytes = read_max_bytes + self.interceptors = interceptors + self.session = session + + async def unary_unary( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> Any: + """Execute a unary-unary RPC with optional retry.""" + call_options = call_options or CallOptions() + + async def execute() -> Any: + return await self._call_unary(method, request, call_options) + + if call_options.retry_policy: + return await self._execute_with_retry(execute, call_options.retry_policy) + return await execute() + + def unary_stream( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> AsyncIterator[Any]: + """Execute a unary-stream RPC.""" + call_options = call_options or CallOptions() + return self._call_server_stream(method, request, call_options) + + async def stream_unary( + self, + method: MethodInfo, + stream: AsyncIterator[Any], + call_options: CallOptions | None = None, + ) -> Any: + """Execute a stream-unary RPC with optional retry.""" + call_options = call_options or CallOptions() + + async def execute() -> Any: + return await self._call_client_stream(method, stream, call_options) + + if call_options.retry_policy: + return await self._execute_with_retry(execute, call_options.retry_policy) + return await execute() + + def stream_stream( + self, + method: MethodInfo, + stream: AsyncIterator[Any], + call_options: CallOptions | None = None, + ) -> AsyncIterator[Any]: + """Execute a stream-stream RPC.""" + call_options = call_options or CallOptions() + return self._call_bidi_stream(method, stream, call_options) + + async def close(self) -> None: + """Close the underlying client.""" + await self._client.close() + + async def __aenter__(self) -> Self: + """Enter the async context manager.""" + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + """Exit the async context manager and close resources.""" + await self.close() + + async def _execute_with_retry(self, func: Any, retry_policy: RetryPolicy) -> Any: + """Execute an async function with retry logic.""" + attempt = 0 + backoff_ms = retry_policy.initial_backoff_ms + + while attempt < retry_policy.max_attempts: + try: + return await 
func() + except ConnecpyException as e: + # Check if the error is retryable + if ( + retry_policy.retryable_codes is None + or e.code not in retry_policy.retryable_codes + ): + raise + + # Check if we've exhausted retries + if attempt >= retry_policy.max_attempts - 1: + raise + + # Wait before retry with exponential backoff + await asyncio.sleep(backoff_ms / 1000.0) + backoff_ms = min( + int(backoff_ms * retry_policy.backoff_multiplier), + retry_policy.max_backoff_ms, + ) + attempt += 1 + + # Should never reach here + msg = "Retry loop exited unexpectedly" + raise RuntimeError(msg) + + def _validate_and_get_timeout(self, options: CallOptions) -> int | None: + """Validate timeout value and return the effective timeout. + + Args: + options: Call options containing timeout settings + + Returns: + The effective timeout in milliseconds, or None for infinite timeout + + Raises: + ValueError: If timeout is invalid (negative or too large) + """ + timeout_ms = ( + options.timeout_ms if options.timeout_ms is not None else self.timeout_ms + ) + if timeout_ms is not None: + if timeout_ms <= 0: + msg = f"Timeout must be positive, got {timeout_ms}ms" + raise ValueError(msg) + if timeout_ms > 8640000000: # 100 days max (protocol supports 100+ days) + msg = f"Timeout too large ({timeout_ms}ms), max is 100 days (8640000000ms)" + raise ValueError(msg) + return timeout_ms + + def _handle_timeout_error(self, e: httpx.TimeoutException) -> None: + """Convert HTTP timeout exception to ConnecpyException. + + Args: + e: The HTTP timeout exception + + Raises: + ConnecpyException: Always raises with DEADLINE_EXCEEDED code + """ + raise ConnecpyException(Code.DEADLINE_EXCEEDED, f"Request timeout: {e}") from e + + async def _call_unary( + self, method: MethodInfo, request: Any, options: CallOptions + ) -> Any: + """Internal method to call unary RPC through the client.""" + timeout_ms = self._validate_and_get_timeout(options) + try: + return await self._client.execute_unary( + request=request, + method=method, + headers=options.headers, + timeout_ms=timeout_ms, + ) + except httpx.TimeoutException as e: + self._handle_timeout_error(e) + raise # Unreachable, but satisfies type checker + + def _call_server_stream( + self, method: MethodInfo, request: Any, options: CallOptions + ) -> AsyncIterator[Any]: + """Internal method to call server streaming RPC.""" + timeout_ms = self._validate_and_get_timeout(options) + try: + return self._client.execute_server_stream( + request=request, + method=method, + headers=options.headers, + timeout_ms=timeout_ms, + ) + except httpx.TimeoutException as e: + self._handle_timeout_error(e) + raise # Unreachable, but satisfies type checker + + async def _call_client_stream( + self, method: MethodInfo, stream: AsyncIterator[Any], options: CallOptions + ) -> Any: + """Internal method to call client streaming RPC.""" + timeout_ms = self._validate_and_get_timeout(options) + try: + return await self._client.execute_client_stream( + request=stream, + method=method, + headers=options.headers, + timeout_ms=timeout_ms, + ) + except httpx.TimeoutException as e: + self._handle_timeout_error(e) + raise # Unreachable, but satisfies type checker + + def _call_bidi_stream( + self, method: MethodInfo, stream: AsyncIterator[Any], options: CallOptions + ) -> AsyncIterator[Any]: + """Internal method to call bidirectional streaming RPC.""" + timeout_ms = self._validate_and_get_timeout(options) + try: + return self._client.execute_bidi_stream( + request=stream, + method=method, + headers=options.headers, + 
timeout_ms=timeout_ms, + ) + except httpx.TimeoutException as e: + self._handle_timeout_error(e) + raise # Unreachable, but satisfies type checker diff --git a/src/connecpy/transport/client/grpc.py b/src/connecpy/transport/client/grpc.py new file mode 100644 index 0000000..5f5965c --- /dev/null +++ b/src/connecpy/transport/client/grpc.py @@ -0,0 +1,375 @@ +"""gRPC protocol transport implementation.""" + +from __future__ import annotations + +import time +import types +from collections import OrderedDict +from collections.abc import Iterator +from typing import TYPE_CHECKING, Any, ClassVar + +from typing_extensions import Self + +from connecpy.code import Code +from connecpy.exceptions import ConnecpyException + +from .base import CallOptions, RetryPolicy +from .types import ( + GrpcChannelCredentials, + GrpcChannelOptions, + GrpcCompression, + GrpcInterceptor, +) + +if TYPE_CHECKING: + from connecpy.method import MethodInfo + +try: + import grpc # type: ignore[import-untyped] + + GRPC_AVAILABLE = True +except ImportError: + GRPC_AVAILABLE = False + grpc = None # type: ignore[assignment] + + +class GrpcTransport: + """Transport implementation using the gRPC protocol. + + This transport uses grpcio to communicate with gRPC servers. + Requires the 'grpcio' package to be installed. + """ + + # Compression algorithm constants + _COMPRESSION_NONE = 0 # grpc.Compression.NoCompression + _COMPRESSION_DEFLATE = 1 # grpc.Compression.Deflate + _COMPRESSION_GZIP = 2 # grpc.Compression.Gzip + _COMPRESSION_MAP: ClassVar[dict[str, int]] = { + "none": _COMPRESSION_NONE, + "deflate": _COMPRESSION_DEFLATE, + "gzip": _COMPRESSION_GZIP, + } + + def __init__( + self, + target: str, + *, + credentials: GrpcChannelCredentials | None = None, + options: GrpcChannelOptions | None = None, + compression: GrpcCompression | str | None = None, + interceptors: list[GrpcInterceptor] | None = None, + ) -> None: + """Initialize the gRPC transport with all grpc channel parameters. + + Args: + target: The server address (e.g., "localhost:50051") + credentials: Channel credentials for secure connections (None for insecure) + options: List of gRPC channel options as key-value tuples + compression: Default compression algorithm + interceptors: List of client interceptors + """ + if not GRPC_AVAILABLE: + msg = ( + "grpcio is required for GrpcTransport. 
" + "Install it with: pip install connecpy[grpc]" + ) + raise ImportError(msg) + + self._target = ( + target.replace("grpc://", "").replace("https://", "").replace("http://", "") + ) + + # Store parameters for later use + self.credentials = credentials + self.options = options or [] + self.compression = compression + self.interceptors = interceptors or [] + + # Add compression to options if specified + channel_options = list(self.options) + if compression is not None: + channel_options.append( + ( + "grpc.default_compression_algorithm", + compression + if isinstance(compression, int) + else self._get_grpc_compression(str(compression)), + ) + ) + + # Create gRPC channel + if credentials is not None: + self._channel = grpc.secure_channel( # type: ignore[attr-defined] + self._target, credentials, options=channel_options + ) + else: + self._channel = grpc.insecure_channel(self._target, options=channel_options) # type: ignore[attr-defined] + + # Apply interceptors if provided + if self.interceptors: + self._channel = grpc.intercept_channel(self._channel, *self.interceptors) # type: ignore[attr-defined] + + # LRU cache for stubs (max 100 entries to prevent memory issues) + self._stubs: OrderedDict[str, Any] = OrderedDict() + self._max_stub_cache_size = 100 + + def unary_unary( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> Any: + """Execute a unary-unary RPC with optional retry.""" + call_options = call_options or CallOptions() + + def execute() -> Any: + stub = self._get_or_create_stub(method, "unary_unary") + metadata = self._prepare_metadata(call_options) + + # Validate and convert timeout + timeout = None + if call_options.timeout_ms is not None: + if call_options.timeout_ms <= 0: + msg = f"Timeout must be positive, got {call_options.timeout_ms}ms" + raise ValueError(msg) + if ( + call_options.timeout_ms > 8640000000 + ): # 100 days max (protocol supports 100+ days) + msg = f"Timeout too large ({call_options.timeout_ms}ms), max is 100 days (8640000000ms)" + raise ValueError(msg) + timeout = call_options.timeout_ms / 1000.0 + + try: + return stub(request, metadata=metadata, timeout=timeout) + except grpc.RpcError as e: # type: ignore[attr-defined] + # Convert gRPC error to ConnecpyException for consistency + code = self._grpc_status_to_code(e.code()) + details = e.details() or "No details provided" + # Get status code name (handle both enum and string) + status_name = getattr(e.code(), "name", str(e.code())) + msg = f"gRPC unary call failed [{status_name}]: {details}" + raise ConnecpyException(code, msg) from e + + if call_options.retry_policy: + return self._execute_with_retry(execute, call_options.retry_policy) + return execute() + + def unary_stream( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> Iterator[Any]: + """Execute a unary-stream RPC.""" + call_options = call_options or CallOptions() + stub = self._get_or_create_stub(method, "unary_stream") + metadata = self._prepare_metadata(call_options) + timeout = call_options.timeout_ms / 1000.0 if call_options.timeout_ms else None + + try: + yield from stub(request, metadata=metadata, timeout=timeout) + except grpc.RpcError as e: # type: ignore[attr-defined] + # Convert gRPC error to ConnecpyException for consistency + code = self._grpc_status_to_code(e.code()) + details = e.details() or "No details provided" + # Get status code name (handle both enum and string) + status_name = getattr(e.code(), "name", str(e.code())) + msg = f"gRPC server stream failed 
[{status_name}]: {details}" + raise ConnecpyException(code, msg) from e + + def stream_unary( + self, + method: MethodInfo, + stream: Iterator[Any], + call_options: CallOptions | None = None, + ) -> Any: + """Execute a stream-unary RPC with optional retry.""" + call_options = call_options or CallOptions() + + def execute() -> Any: + stub = self._get_or_create_stub(method, "stream_unary") + metadata = self._prepare_metadata(call_options) + timeout = ( + call_options.timeout_ms / 1000.0 if call_options.timeout_ms else None + ) + + try: + return stub(stream, metadata=metadata, timeout=timeout) + except grpc.RpcError as e: # type: ignore[attr-defined] + # Convert gRPC error to ConnecpyException for consistency + code = self._grpc_status_to_code(e.code()) + details = e.details() or "No details provided" + # Get status code name (handle both enum and string) + status_name = getattr(e.code(), "name", str(e.code())) + msg = f"gRPC client stream failed [{status_name}]: {details}" + raise ConnecpyException(code, msg) from e + + if call_options.retry_policy: + return self._execute_with_retry(execute, call_options.retry_policy) + return execute() + + def stream_stream( + self, + method: MethodInfo, + stream: Iterator[Any], + call_options: CallOptions | None = None, + ) -> Iterator[Any]: + """Execute a stream-stream RPC.""" + call_options = call_options or CallOptions() + stub = self._get_or_create_stub(method, "stream_stream") + metadata = self._prepare_metadata(call_options) + timeout = call_options.timeout_ms / 1000.0 if call_options.timeout_ms else None + + try: + yield from stub(stream, metadata=metadata, timeout=timeout) + except grpc.RpcError as e: # type: ignore[attr-defined] + # Convert gRPC error to ConnecpyException for consistency + code = self._grpc_status_to_code(e.code()) + details = e.details() or "No details provided" + # Get status code name (handle both enum and string) + status_name = getattr(e.code(), "name", str(e.code())) + msg = f"gRPC bidi stream failed [{status_name}]: {details}" + raise ConnecpyException(code, msg) from e + + def close(self) -> None: + """Close the gRPC channel.""" + self._channel.close() + + def __enter__(self) -> Self: + """Enter the context manager.""" + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + """Exit the context manager and close resources.""" + self.close() + + def _get_or_create_stub(self, method: MethodInfo, rpc_type: str) -> Any: + """Get or create a gRPC stub for the given method with LRU cache.""" + # Build the full method name + full_method_name = f"/{method.service_name}/{method.name}" + + # Check if stub exists and move to end (LRU) + if full_method_name in self._stubs: + self._stubs.move_to_end(full_method_name) + return self._stubs[full_method_name] + + # Check cache size limit + if len(self._stubs) >= self._max_stub_cache_size: + # Remove oldest item (FIFO) + self._stubs.popitem(last=False) + + # Create new stub + if True: # Always create since we just checked it doesn't exist + # Create the appropriate stub based on RPC type + if rpc_type == "unary_unary": + self._stubs[full_method_name] = self._channel.unary_unary( + full_method_name, + request_serializer=lambda x: x.SerializeToString(), # type: ignore[attr-defined] + response_deserializer=lambda x: method.output().FromString(x), + ) + elif rpc_type == "unary_stream": + self._stubs[full_method_name] = self._channel.unary_stream( + full_method_name, + 
request_serializer=lambda x: x.SerializeToString(), # type: ignore[attr-defined] + response_deserializer=lambda x: method.output().FromString(x), + ) + elif rpc_type == "stream_unary": + self._stubs[full_method_name] = self._channel.stream_unary( + full_method_name, + request_serializer=lambda x: x.SerializeToString(), # type: ignore[attr-defined] + response_deserializer=lambda x: method.output().FromString(x), + ) + elif rpc_type == "stream_stream": + self._stubs[full_method_name] = self._channel.stream_stream( + full_method_name, + request_serializer=lambda x: x.SerializeToString(), # type: ignore[attr-defined] + response_deserializer=lambda x: method.output().FromString(x), + ) + + return self._stubs[full_method_name] + + def _prepare_metadata(self, call_options: CallOptions) -> list[tuple[str, str]]: + """Prepare gRPC metadata from options.""" + metadata = [] + for key, value in call_options.headers.items(): + metadata.append((key.lower(), value)) + return metadata + + def _merge_options(self, call_options: CallOptions | None) -> CallOptions: + """Merge call options with transport defaults.""" + # Since we no longer have default TransportOptions, + # just return the provided options or an empty one + return call_options or CallOptions() + + def _execute_with_retry(self, func: Any, retry_policy: RetryPolicy) -> Any: + """Execute a function with retry logic.""" + + attempt = 0 + backoff_ms = retry_policy.initial_backoff_ms + + while attempt < retry_policy.max_attempts: + try: + return func() + except grpc.RpcError as e: # type: ignore[attr-defined] + # Convert gRPC error to ConnecpyException for consistency + code = self._grpc_status_to_code(e.code()) + + # Check if the error is retryable + if ( + retry_policy.retryable_codes is None + or code not in retry_policy.retryable_codes + ): + status_name = getattr(e.code(), "name", str(e.code())) + raise ConnecpyException( + code, + f"Non-retryable error [{status_name}]: {e.details() or 'No details'}", + ) from e + + # Check if we've exhausted retries + if attempt >= retry_policy.max_attempts - 1: + status_name = getattr(e.code(), "name", str(e.code())) + raise ConnecpyException( + code, + f"Max retries ({retry_policy.max_attempts}) exceeded [{status_name}]: {e.details() or 'No details'}", + ) from e + + # Wait before retry with exponential backoff + time.sleep(backoff_ms / 1000.0) + backoff_ms = min( + int(backoff_ms * retry_policy.backoff_multiplier), + retry_policy.max_backoff_ms, + ) + attempt += 1 + + # Should never reach here + msg = "Retry loop exited unexpectedly" + raise RuntimeError(msg) + + def _grpc_status_to_code(self, grpc_status: grpc.StatusCode) -> Code: # type: ignore[name-defined, type-arg] + """Convert gRPC status code to Connect Code.""" + + # Note: Connect doesn't have an OK code, only error codes + if not GRPC_AVAILABLE: + return Code.UNKNOWN + mapping = { + grpc.StatusCode.CANCELLED: Code.CANCELED, # type: ignore[attr-defined] + grpc.StatusCode.UNKNOWN: Code.UNKNOWN, # type: ignore[attr-defined] + grpc.StatusCode.INVALID_ARGUMENT: Code.INVALID_ARGUMENT, # type: ignore[attr-defined] + grpc.StatusCode.DEADLINE_EXCEEDED: Code.DEADLINE_EXCEEDED, # type: ignore[attr-defined] + grpc.StatusCode.NOT_FOUND: Code.NOT_FOUND, # type: ignore[attr-defined] + grpc.StatusCode.ALREADY_EXISTS: Code.ALREADY_EXISTS, # type: ignore[attr-defined] + grpc.StatusCode.PERMISSION_DENIED: Code.PERMISSION_DENIED, # type: ignore[attr-defined] + grpc.StatusCode.RESOURCE_EXHAUSTED: Code.RESOURCE_EXHAUSTED, # type: ignore[attr-defined] + 
grpc.StatusCode.FAILED_PRECONDITION: Code.FAILED_PRECONDITION, # type: ignore[attr-defined] + grpc.StatusCode.ABORTED: Code.ABORTED, # type: ignore[attr-defined] + grpc.StatusCode.OUT_OF_RANGE: Code.OUT_OF_RANGE, # type: ignore[attr-defined] + grpc.StatusCode.UNIMPLEMENTED: Code.UNIMPLEMENTED, # type: ignore[attr-defined] + grpc.StatusCode.INTERNAL: Code.INTERNAL, # type: ignore[attr-defined] + grpc.StatusCode.UNAVAILABLE: Code.UNAVAILABLE, # type: ignore[attr-defined] + grpc.StatusCode.DATA_LOSS: Code.DATA_LOSS, # type: ignore[attr-defined] + grpc.StatusCode.UNAUTHENTICATED: Code.UNAUTHENTICATED, # type: ignore[attr-defined] + } + return mapping.get(grpc_status, Code.UNKNOWN) + + def _get_grpc_compression(self, compression: str) -> int: + """Convert compression name to gRPC compression algorithm.""" + return self._COMPRESSION_MAP.get(compression.lower(), self._COMPRESSION_NONE) diff --git a/src/connecpy/transport/client/grpc_async.py b/src/connecpy/transport/client/grpc_async.py new file mode 100644 index 0000000..585ad6a --- /dev/null +++ b/src/connecpy/transport/client/grpc_async.py @@ -0,0 +1,344 @@ +"""Async gRPC protocol transport implementation.""" + +from __future__ import annotations + +import asyncio +import types +from collections import OrderedDict +from collections.abc import AsyncIterator +from typing import TYPE_CHECKING, Any, ClassVar + +from typing_extensions import Self + +from connecpy.code import Code +from connecpy.exceptions import ConnecpyException + +from .base import CallOptions, RetryPolicy +from .types import ( + GrpcChannelCredentials, + GrpcChannelOptions, + GrpcClientInterceptor, + GrpcCompression, +) + +if TYPE_CHECKING: + from connecpy.method import MethodInfo + +try: + import grpc # type: ignore[import-untyped] + import grpc.aio # type: ignore[import-untyped] + + GRPC_AVAILABLE = True +except ImportError: + GRPC_AVAILABLE = False + grpc = None # type: ignore[assignment] + + +class GrpcTransportAsync: + """Async transport implementation using the gRPC protocol. + + This transport uses grpcio's async/await support to communicate with gRPC servers. + Requires the 'grpcio' package to be installed. + """ + + # Compression algorithm constants + _COMPRESSION_NONE = 0 # grpc.Compression.NoCompression + _COMPRESSION_DEFLATE = 1 # grpc.Compression.Deflate + _COMPRESSION_GZIP = 2 # grpc.Compression.Gzip + _COMPRESSION_MAP: ClassVar[dict[str, int]] = { + "none": _COMPRESSION_NONE, + "deflate": _COMPRESSION_DEFLATE, + "gzip": _COMPRESSION_GZIP, + } + + def __init__( + self, + target: str, + *, + credentials: GrpcChannelCredentials | None = None, + options: GrpcChannelOptions | None = None, + compression: GrpcCompression | str | None = None, + interceptors: list[GrpcClientInterceptor] | None = None, + ) -> None: + """Initialize the async gRPC transport with all grpc.aio channel parameters. + + Args: + target: The server address (e.g., "localhost:50051") + credentials: Channel credentials for secure connections (None for insecure) + options: List of gRPC channel options as key-value tuples + compression: Default compression algorithm + interceptors: List of async client interceptors + """ + if not GRPC_AVAILABLE: + msg = ( + "grpcio is required for GrpcTransportAsync. 
" + "Install it with: pip install connecpy[grpc]" + ) + raise ImportError(msg) + + self._target = ( + target.replace("grpc://", "").replace("https://", "").replace("http://", "") + ) + + # Store parameters for later use + self.credentials = credentials + self.options = options or [] + self.compression = compression + self.interceptors = interceptors or [] + + # Add compression to options if specified + channel_options = list(self.options) + if compression is not None: + channel_options.append( + ( + "grpc.default_compression_algorithm", + compression + if isinstance(compression, int) + else self._get_grpc_compression(str(compression)), + ) + ) + + # Create async gRPC channel + if credentials is not None: + self._channel = grpc.aio.secure_channel( # type: ignore[attr-defined] + self._target, + credentials, + options=channel_options, + interceptors=self.interceptors, + ) + else: + self._channel = grpc.aio.insecure_channel( # type: ignore[attr-defined] + self._target, options=channel_options, interceptors=self.interceptors + ) + + # LRU cache for stubs (max 100 entries to prevent memory issues) + self._stubs: OrderedDict[str, Any] = OrderedDict() + self._max_stub_cache_size = 100 + + async def unary_unary( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> Any: + """Execute a unary-unary RPC with optional retry.""" + call_options = call_options or CallOptions() + + async def execute() -> Any: + stub = self._get_or_create_stub(method, "unary_unary") + metadata = self._prepare_metadata(call_options) + + # Validate and convert timeout + timeout = None + if call_options.timeout_ms is not None: + if call_options.timeout_ms <= 0: + msg = f"Timeout must be positive, got {call_options.timeout_ms}ms" + raise ValueError(msg) + if ( + call_options.timeout_ms > 8640000000 + ): # 100 days max (protocol supports 100+ days) + msg = f"Timeout too large ({call_options.timeout_ms}ms), max is 100 days (8640000000ms)" + raise ValueError(msg) + timeout = call_options.timeout_ms / 1000.0 + + try: + return await stub(request, metadata=metadata, timeout=timeout) + except grpc.aio.AioRpcError as e: # type: ignore[attr-defined] + # Convert gRPC error to ConnecpyException for consistency + code = self._grpc_status_to_code(e.code()) + details = e.details() or "No details provided" + msg = f"gRPC unary call failed [{e.code().name}]: {details}" + raise ConnecpyException(code, msg) from e + + if call_options.retry_policy: + return await self._execute_with_retry(execute, call_options.retry_policy) + return await execute() + + def unary_stream( + self, method: MethodInfo, request: Any, call_options: CallOptions | None = None + ) -> AsyncIterator[Any]: + """Execute a unary-stream RPC.""" + call_options = call_options or CallOptions() + stub = self._get_or_create_stub(method, "unary_stream") + metadata = self._prepare_metadata(call_options) + timeout = call_options.timeout_ms / 1000.0 if call_options.timeout_ms else None + return stub(request, metadata=metadata, timeout=timeout) + + async def stream_unary( + self, + method: MethodInfo, + stream: AsyncIterator[Any], + call_options: CallOptions | None = None, + ) -> Any: + """Execute a stream-unary RPC with optional retry.""" + call_options = call_options or CallOptions() + + async def execute() -> Any: + stub = self._get_or_create_stub(method, "stream_unary") + metadata = self._prepare_metadata(call_options) + timeout = ( + call_options.timeout_ms / 1000.0 if call_options.timeout_ms else None + ) + + try: + return await stub(stream, 
metadata=metadata, timeout=timeout)
+            except grpc.aio.AioRpcError as e:  # type: ignore[attr-defined]
+                # Convert gRPC error to ConnecpyException for consistency
+                code = self._grpc_status_to_code(e.code())
+                details = e.details() or "No details provided"
+                msg = f"gRPC stream call failed [{e.code().name}]: {details}"
+                raise ConnecpyException(code, msg) from e
+
+        if call_options.retry_policy:
+            return await self._execute_with_retry(execute, call_options.retry_policy)
+        return await execute()
+
+    def stream_stream(
+        self,
+        method: MethodInfo,
+        stream: AsyncIterator[Any],
+        call_options: CallOptions | None = None,
+    ) -> AsyncIterator[Any]:
+        """Execute a stream-stream RPC."""
+        call_options = call_options or CallOptions()
+        stub = self._get_or_create_stub(method, "stream_stream")
+        metadata = self._prepare_metadata(call_options)
+        timeout = call_options.timeout_ms / 1000.0 if call_options.timeout_ms else None
+        return stub(stream, metadata=metadata, timeout=timeout)
+
+    async def close(self) -> None:
+        """Close the gRPC channel."""
+        await self._channel.close()
+
+    async def __aenter__(self) -> Self:
+        """Enter the async context manager."""
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: types.TracebackType | None,
+    ) -> None:
+        """Exit the async context manager and close resources."""
+        await self.close()
+
+    def _get_or_create_stub(self, method: MethodInfo, rpc_type: str) -> Any:
+        """Get or create an async gRPC stub for the given method with LRU cache."""
+        # Build the full method name
+        full_method_name = f"/{method.service_name}/{method.name}"
+
+        # Check if stub exists and move to end (LRU)
+        if full_method_name in self._stubs:
+            self._stubs.move_to_end(full_method_name)
+            return self._stubs[full_method_name]
+
+        # Check cache size limit
+        if len(self._stubs) >= self._max_stub_cache_size:
+            # Remove oldest item (FIFO)
+            self._stubs.popitem(last=False)
+
+        # Create the appropriate stub based on RPC type
+        if rpc_type == "unary_unary":
+            self._stubs[full_method_name] = self._channel.unary_unary(
+                full_method_name,
+                request_serializer=lambda x: x.SerializeToString(),  # type: ignore[attr-defined]
+                response_deserializer=lambda x: method.output().FromString(x),
+            )
+        elif rpc_type == "unary_stream":
+            self._stubs[full_method_name] = self._channel.unary_stream(
+                full_method_name,
+                request_serializer=lambda x: x.SerializeToString(),  # type: ignore[attr-defined]
+                response_deserializer=lambda x: method.output().FromString(x),
+            )
+        elif rpc_type == "stream_unary":
+            self._stubs[full_method_name] = self._channel.stream_unary(
+                full_method_name,
+                request_serializer=lambda x: x.SerializeToString(),  # type: ignore[attr-defined]
+                response_deserializer=lambda x: method.output().FromString(x),
+            )
+        elif rpc_type == "stream_stream":
+            self._stubs[full_method_name] = self._channel.stream_stream(
+                full_method_name,
+                request_serializer=lambda x: x.SerializeToString(),  # type: ignore[attr-defined]
+                response_deserializer=lambda x: method.output().FromString(x),
+            )
+
+        return self._stubs[full_method_name]
+
+    def _prepare_metadata(self, call_options: CallOptions) -> list[tuple[str, str]]:
+        """Prepare gRPC metadata from options."""
+        metadata = []
+        for key, value in call_options.headers.items():
+            metadata.append((key.lower(), value))
+        return metadata
+
+    async def _execute_with_retry(self, func: Any, retry_policy: 
RetryPolicy) -> Any: + """Execute an async function with retry logic.""" + attempt = 0 + backoff_ms = retry_policy.initial_backoff_ms + + while attempt < retry_policy.max_attempts: + try: + return await func() + except grpc.aio.AioRpcError as e: # type: ignore[attr-defined] + # Convert gRPC error to ConnecpyException for consistency + code = self._grpc_status_to_code(e.code()) + + # Check if the error is retryable + if ( + retry_policy.retryable_codes is None + or code not in retry_policy.retryable_codes + ): + status_name = getattr(e.code(), "name", str(e.code())) + raise ConnecpyException( + code, + f"Non-retryable error [{status_name}]: {e.details() or 'No details'}", + ) from e + + # Check if we've exhausted retries + if attempt >= retry_policy.max_attempts - 1: + raise ConnecpyException( + code, + f"Max retries ({retry_policy.max_attempts}) exceeded [{e.code().name}]: {e.details() or 'No details'}", + ) from e + + # Wait before retry with exponential backoff + await asyncio.sleep(backoff_ms / 1000.0) + backoff_ms = min( + int(backoff_ms * retry_policy.backoff_multiplier), + retry_policy.max_backoff_ms, + ) + attempt += 1 + + # Should never reach here + msg = "Retry loop exited unexpectedly" + raise RuntimeError(msg) + + def _grpc_status_to_code(self, grpc_status: grpc.StatusCode) -> Code: # type: ignore[name-defined, type-arg] + """Convert gRPC status code to Connect Code.""" + # Note: Connect doesn't have an OK code, only error codes + if not GRPC_AVAILABLE: + return Code.UNKNOWN + mapping = { + grpc.StatusCode.CANCELLED: Code.CANCELED, # type: ignore[attr-defined] + grpc.StatusCode.UNKNOWN: Code.UNKNOWN, # type: ignore[attr-defined] + grpc.StatusCode.INVALID_ARGUMENT: Code.INVALID_ARGUMENT, # type: ignore[attr-defined] + grpc.StatusCode.DEADLINE_EXCEEDED: Code.DEADLINE_EXCEEDED, # type: ignore[attr-defined] + grpc.StatusCode.NOT_FOUND: Code.NOT_FOUND, # type: ignore[attr-defined] + grpc.StatusCode.ALREADY_EXISTS: Code.ALREADY_EXISTS, # type: ignore[attr-defined] + grpc.StatusCode.PERMISSION_DENIED: Code.PERMISSION_DENIED, # type: ignore[attr-defined] + grpc.StatusCode.RESOURCE_EXHAUSTED: Code.RESOURCE_EXHAUSTED, # type: ignore[attr-defined] + grpc.StatusCode.FAILED_PRECONDITION: Code.FAILED_PRECONDITION, # type: ignore[attr-defined] + grpc.StatusCode.ABORTED: Code.ABORTED, # type: ignore[attr-defined] + grpc.StatusCode.OUT_OF_RANGE: Code.OUT_OF_RANGE, # type: ignore[attr-defined] + grpc.StatusCode.UNIMPLEMENTED: Code.UNIMPLEMENTED, # type: ignore[attr-defined] + grpc.StatusCode.INTERNAL: Code.INTERNAL, # type: ignore[attr-defined] + grpc.StatusCode.UNAVAILABLE: Code.UNAVAILABLE, # type: ignore[attr-defined] + grpc.StatusCode.DATA_LOSS: Code.DATA_LOSS, # type: ignore[attr-defined] + grpc.StatusCode.UNAUTHENTICATED: Code.UNAUTHENTICATED, # type: ignore[attr-defined] + } + return mapping.get(grpc_status, Code.UNKNOWN) + + def _get_grpc_compression(self, compression: str) -> int: + """Convert compression name to gRPC compression algorithm.""" + return self._COMPRESSION_MAP.get(compression.lower(), self._COMPRESSION_NONE) diff --git a/src/connecpy/transport/client/types.py b/src/connecpy/transport/client/types.py new file mode 100644 index 0000000..ab4f2a7 --- /dev/null +++ b/src/connecpy/transport/client/types.py @@ -0,0 +1,28 @@ +"""Type definitions for the Transport API.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from typing import TypeAlias + + import grpc # type: ignore[import-untyped] + import grpc.aio # type: 
ignore[import-untyped] + + GrpcChannelCredentials: TypeAlias = grpc.ChannelCredentials + GrpcCompression: TypeAlias = grpc.Compression + GrpcChannel: TypeAlias = grpc.Channel | grpc.aio.Channel + GrpcClientInterceptor: TypeAlias = grpc.aio.ClientInterceptor + GrpcInterceptor: TypeAlias = ( + grpc.UnaryUnaryClientInterceptor | grpc.aio.ClientInterceptor + ) + GrpcChannelOptions: TypeAlias = list[tuple[str, Any]] +else: + # Runtime fallback when grpcio is not installed + GrpcChannelCredentials: TypeAlias = Any # type: ignore[misc] + GrpcCompression: TypeAlias = Any # type: ignore[misc] + GrpcChannel: TypeAlias = Any # type: ignore[misc] + GrpcClientInterceptor: TypeAlias = Any # type: ignore[misc] + GrpcInterceptor: TypeAlias = Any # type: ignore[misc] + GrpcChannelOptions: TypeAlias = list[tuple[str, Any]] # type: ignore[misc] diff --git a/test/haberdasher_connecpy.py b/test/haberdasher_connecpy.py index 56a6f01..f4b5ef9 100644 --- a/test/haberdasher_connecpy.py +++ b/test/haberdasher_connecpy.py @@ -2,7 +2,7 @@ # source: haberdasher.proto from collections.abc import AsyncIterator, Iterable, Iterator, Mapping -from typing import Protocol +from typing import ClassVar, Protocol import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 @@ -23,6 +23,56 @@ class Haberdasher(Protocol): + """Service protocol for Haberdasher.""" + + _service_info: ClassVar[dict] = { + "name": "i2y.connecpy.example.Haberdasher", + "methods": { + "make_hat": MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + "make_flexible_hat": MethodInfo( + name="MakeFlexibleHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "make_similar_hats": MethodInfo( + name="MakeSimilarHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + "make_various_hats": MethodInfo( + name="MakeVariousHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "list_parts": MethodInfo( + name="ListParts", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=haberdasher__pb2.Hat.Part, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "do_nothing": MethodInfo( + name="DoNothing", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=google_dot_protobuf_dot_empty__pb2.Empty, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + }, + } + async def make_hat( self, request: haberdasher__pb2.Size, ctx: RequestContext ) -> haberdasher__pb2.Hat: @@ -260,6 +310,56 @@ async def do_nothing( class HaberdasherSync(Protocol): + """Synchronous service protocol for Haberdasher.""" + + _service_info: ClassVar[dict] = { + "name": "i2y.connecpy.example.Haberdasher", + "methods": { + "make_hat": MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + "make_flexible_hat": MethodInfo( + name="MakeFlexibleHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + 
output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "make_similar_hats": MethodInfo( + name="MakeSimilarHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + "make_various_hats": MethodInfo( + name="MakeVariousHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "list_parts": MethodInfo( + name="ListParts", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=haberdasher__pb2.Hat.Part, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "do_nothing": MethodInfo( + name="DoNothing", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=google_dot_protobuf_dot_empty__pb2.Empty, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + }, + } + def make_hat( self, request: haberdasher__pb2.Size, ctx: RequestContext ) -> haberdasher__pb2.Hat: diff --git a/test/haberdasher_edition_2023_connecpy.py b/test/haberdasher_edition_2023_connecpy.py index c969936..daf101d 100644 --- a/test/haberdasher_edition_2023_connecpy.py +++ b/test/haberdasher_edition_2023_connecpy.py @@ -2,7 +2,7 @@ # source: haberdasher_edition_2023.proto from collections.abc import Iterable, Mapping -from typing import Protocol +from typing import ClassVar, Protocol from connecpy.client import ConnecpyClient, ConnecpyClientSync from connecpy.code import Code @@ -21,6 +21,21 @@ class Haberdasher(Protocol): + """Service protocol for Haberdasher.""" + + _service_info: ClassVar[dict] = { + "name": "i2y.connecpy.example2023.Haberdasher", + "methods": { + "make_hat": MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example2023.Haberdasher", + input=haberdasher__edition__2023__pb2.Size, + output=haberdasher__edition__2023__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ) + }, + } + async def make_hat( self, request: haberdasher__edition__2023__pb2.Size, ctx: RequestContext ) -> haberdasher__edition__2023__pb2.Hat: @@ -83,6 +98,21 @@ async def make_hat( class HaberdasherSync(Protocol): + """Synchronous service protocol for Haberdasher.""" + + _service_info: ClassVar[dict] = { + "name": "i2y.connecpy.example2023.Haberdasher", + "methods": { + "make_hat": MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example2023.Haberdasher", + input=haberdasher__edition__2023__pb2.Size, + output=haberdasher__edition__2023__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ) + }, + } + def make_hat( self, request: haberdasher__edition__2023__pb2.Size, ctx: RequestContext ) -> haberdasher__edition__2023__pb2.Hat: diff --git a/test/haberdasher_edition_2023_pb2.py b/test/haberdasher_edition_2023_pb2.py index 35f95cd..7cceeb6 100644 --- a/test/haberdasher_edition_2023_pb2.py +++ b/test/haberdasher_edition_2023_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: haberdasher_edition_2023.proto -# Protobuf Python Version: 6.32.0 +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -11,9 +11,9 @@ from google.protobuf.internal import builder as _builder _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, - 6, - 32, - 0, + 5, + 29, + 3, '', 'haberdasher_edition_2023.proto' ) @@ -24,7 +24,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ehaberdasher_edition_2023.proto\x12\x18i2y.connecpy.example2023\"C\n\x03Hat\x12\x12\n\x04size\x18\x01 \x01(\x05R\x04size\x12\x14\n\x05\x63olor\x18\x02 \x01(\tR\x05\x63olor\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\"@\n\x04Size\x12\x16\n\x06inches\x18\x01 \x01(\x05R\x06inches\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription2\\\n\x0bHaberdasher\x12M\n\x07MakeHat\x12\x1e.i2y.connecpy.example2023.Size\x1a\x1d.i2y.connecpy.example2023.Hat\"\x03\x90\x02\x01\x42\rZ\x0b\x65xample2023b\x08\x65\x64itionsp\xe8\x07') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ehaberdasher_edition_2023.proto\x12\x18i2y.connecpy.example2023\"0\n\x03Hat\x12\x0c\n\x04size\x18\x01 \x01(\x05\x12\r\n\x05\x63olor\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\"+\n\x04Size\x12\x0e\n\x06inches\x18\x01 \x01(\x05\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t2\\\n\x0bHaberdasher\x12M\n\x07MakeHat\x12\x1e.i2y.connecpy.example2023.Size\x1a\x1d.i2y.connecpy.example2023.Hat\"\x03\x90\x02\x01\x42\rZ\x0b\x65xample2023b\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -35,9 +35,9 @@ _globals['_HABERDASHER'].methods_by_name['MakeHat']._loaded_options = None _globals['_HABERDASHER'].methods_by_name['MakeHat']._serialized_options = b'\220\002\001' _globals['_HAT']._serialized_start=60 - _globals['_HAT']._serialized_end=127 - _globals['_SIZE']._serialized_start=129 - _globals['_SIZE']._serialized_end=193 - _globals['_HABERDASHER']._serialized_start=195 - _globals['_HABERDASHER']._serialized_end=287 + _globals['_HAT']._serialized_end=108 + _globals['_SIZE']._serialized_start=110 + _globals['_SIZE']._serialized_end=153 + _globals['_HABERDASHER']._serialized_start=155 + _globals['_HABERDASHER']._serialized_end=247 # @@protoc_insertion_point(module_scope) diff --git a/test/haberdasher_pb2.py b/test/haberdasher_pb2.py index 075aa7c..060da2a 100644 --- a/test/haberdasher_pb2.py +++ b/test/haberdasher_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: haberdasher.proto -# Protobuf Python Version: 6.32.0 +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -11,9 +11,9 @@ from google.protobuf.internal import builder as _builder _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, - 6, - 32, - 0, + 5, + 29, + 3, '', 'haberdasher.proto' ) @@ -25,7 +25,7 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11haberdasher.proto\x12\x14i2y.connecpy.example\x1a\x1bgoogle/protobuf/empty.proto\"i\n\x03Hat\x12\x12\n\x04size\x18\x01 \x01(\x05R\x04size\x12\x14\n\x05\x63olor\x18\x02 \x01(\tR\x05\x63olor\x12\x17\n\x04name\x18\x03 \x01(\tH\x00R\x04name\x88\x01\x01\x1a\x16\n\x04Part\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02idB\x07\n\x05_name\"@\n\x04Size\x12\x16\n\x06inches\x18\x01 \x01(\x05R\x06inches\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription2\xc9\x03\n\x0bHaberdasher\x12\x45\n\x07MakeHat\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x03\x90\x02\x01\x12L\n\x0fMakeFlexibleHat\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x00(\x01\x12O\n\x0fMakeSimilarHats\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x03\x90\x02\x01\x30\x01\x12N\n\x0fMakeVariousHats\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x00(\x01\x30\x01\x12G\n\tListParts\x12\x16.google.protobuf.Empty\x1a\x1e.i2y.connecpy.example.Hat.Part\"\x00\x30\x01\x12;\n\tDoNothing\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.EmptyB\tZ\x07\x65xampleb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11haberdasher.proto\x12\x14i2y.connecpy.example\x1a\x1bgoogle/protobuf/empty.proto\"R\n\x03Hat\x12\x0c\n\x04size\x18\x01 \x01(\x05\x12\r\n\x05\x63olor\x18\x02 \x01(\t\x12\x11\n\x04name\x18\x03 \x01(\tH\x00\x88\x01\x01\x1a\x12\n\x04Part\x12\n\n\x02id\x18\x01 \x01(\tB\x07\n\x05_name\"+\n\x04Size\x12\x0e\n\x06inches\x18\x01 \x01(\x05\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t2\xc9\x03\n\x0bHaberdasher\x12\x45\n\x07MakeHat\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x03\x90\x02\x01\x12L\n\x0fMakeFlexibleHat\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x00(\x01\x12O\n\x0fMakeSimilarHats\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x03\x90\x02\x01\x30\x01\x12N\n\x0fMakeVariousHats\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x00(\x01\x30\x01\x12G\n\tListParts\x12\x16.google.protobuf.Empty\x1a\x1e.i2y.connecpy.example.Hat.Part\"\x00\x30\x01\x12;\n\tDoNothing\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.EmptyB\tZ\x07\x65xampleb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -38,11 +38,11 @@ _globals['_HABERDASHER'].methods_by_name['MakeSimilarHats']._loaded_options = None _globals['_HABERDASHER'].methods_by_name['MakeSimilarHats']._serialized_options = b'\220\002\001' _globals['_HAT']._serialized_start=72 - _globals['_HAT']._serialized_end=177 - _globals['_HAT_PART']._serialized_start=146 - _globals['_HAT_PART']._serialized_end=168 - _globals['_SIZE']._serialized_start=179 - _globals['_SIZE']._serialized_end=243 - _globals['_HABERDASHER']._serialized_start=246 - _globals['_HABERDASHER']._serialized_end=703 + 
_globals['_HAT']._serialized_end=154 + _globals['_HAT_PART']._serialized_start=127 + _globals['_HAT_PART']._serialized_end=145 + _globals['_SIZE']._serialized_start=156 + _globals['_SIZE']._serialized_end=199 + _globals['_HABERDASHER']._serialized_start=202 + _globals['_HABERDASHER']._serialized_end=659 # @@protoc_insertion_point(module_scope) diff --git a/test/transport_api/__init__.py b/test/transport_api/__init__.py new file mode 100644 index 0000000..acaf4b1 --- /dev/null +++ b/test/transport_api/__init__.py @@ -0,0 +1 @@ +"""Tests for the experimental Transport API feature.""" diff --git a/test/transport_api/buf.gen.yaml b/test/transport_api/buf.gen.yaml new file mode 100644 index 0000000..9d4489c --- /dev/null +++ b/test/transport_api/buf.gen.yaml @@ -0,0 +1,14 @@ +version: v2 +plugins: + - remote: buf.build/protocolbuffers/python:v32.0 + out: transport_api + - remote: buf.build/protocolbuffers/pyi:v32.0 + out: transport_api + - local: + - go + - run + - /Users/i2y/connecpy/protoc-gen-connecpy + out: transport_api + opt: + - imports=relative + - transport_api=true \ No newline at end of file diff --git a/test/transport_api/buf.yaml b/test/transport_api/buf.yaml new file mode 100644 index 0000000..0555821 --- /dev/null +++ b/test/transport_api/buf.yaml @@ -0,0 +1,3 @@ +version: v2 +modules: + - path: . \ No newline at end of file diff --git a/test/transport_api/haberdasher_connecpy.py b/test/transport_api/haberdasher_connecpy.py new file mode 100644 index 0000000..f4b5ef9 --- /dev/null +++ b/test/transport_api/haberdasher_connecpy.py @@ -0,0 +1,595 @@ +# Generated by https://github.com/i2y/connecpy/v2/protoc-gen-connecpy. DO NOT EDIT! +# source: haberdasher.proto + +from collections.abc import AsyncIterator, Iterable, Iterator, Mapping +from typing import ClassVar, Protocol + +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + +from connecpy.client import ConnecpyClient, ConnecpyClientSync +from connecpy.code import Code +from connecpy.exceptions import ConnecpyException +from connecpy.interceptor import Interceptor, InterceptorSync +from connecpy.method import IdempotencyLevel, MethodInfo +from connecpy.request import Headers, RequestContext +from connecpy.server import ( + ConnecpyASGIApplication, + ConnecpyWSGIApplication, + Endpoint, + EndpointSync, +) + +from . 
import haberdasher_pb2 as haberdasher__pb2 + + +class Haberdasher(Protocol): + """Service protocol for Haberdasher.""" + + _service_info: ClassVar[dict] = { + "name": "i2y.connecpy.example.Haberdasher", + "methods": { + "make_hat": MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + "make_flexible_hat": MethodInfo( + name="MakeFlexibleHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "make_similar_hats": MethodInfo( + name="MakeSimilarHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + "make_various_hats": MethodInfo( + name="MakeVariousHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "list_parts": MethodInfo( + name="ListParts", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=haberdasher__pb2.Hat.Part, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "do_nothing": MethodInfo( + name="DoNothing", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=google_dot_protobuf_dot_empty__pb2.Empty, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + }, + } + + async def make_hat( + self, request: haberdasher__pb2.Size, ctx: RequestContext + ) -> haberdasher__pb2.Hat: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + async def make_flexible_hat( + self, request: AsyncIterator[haberdasher__pb2.Size], ctx: RequestContext + ) -> haberdasher__pb2.Hat: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + def make_similar_hats( + self, request: haberdasher__pb2.Size, ctx: RequestContext + ) -> AsyncIterator[haberdasher__pb2.Hat]: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + def make_various_hats( + self, request: AsyncIterator[haberdasher__pb2.Size], ctx: RequestContext + ) -> AsyncIterator[haberdasher__pb2.Hat]: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + def list_parts( + self, request: google_dot_protobuf_dot_empty__pb2.Empty, ctx: RequestContext + ) -> AsyncIterator[haberdasher__pb2.Hat.Part]: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + async def do_nothing( + self, request: google_dot_protobuf_dot_empty__pb2.Empty, ctx: RequestContext + ) -> google_dot_protobuf_dot_empty__pb2.Empty: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + +class HaberdasherASGIApplication(ConnecpyASGIApplication): + def __init__( + self, + service: Haberdasher, + *, + interceptors: Iterable[Interceptor] = (), + read_max_bytes: int | None = None, + ) -> None: + super().__init__( + endpoints={ + "/i2y.connecpy.example.Haberdasher/MakeHat": Endpoint.unary( + method=MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + function=service.make_hat, + ), + "/i2y.connecpy.example.Haberdasher/MakeFlexibleHat": Endpoint.client_stream( + method=MethodInfo( + name="MakeFlexibleHat", + 
service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + function=service.make_flexible_hat, + ), + "/i2y.connecpy.example.Haberdasher/MakeSimilarHats": Endpoint.server_stream( + method=MethodInfo( + name="MakeSimilarHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + function=service.make_similar_hats, + ), + "/i2y.connecpy.example.Haberdasher/MakeVariousHats": Endpoint.bidi_stream( + method=MethodInfo( + name="MakeVariousHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + function=service.make_various_hats, + ), + "/i2y.connecpy.example.Haberdasher/ListParts": Endpoint.server_stream( + method=MethodInfo( + name="ListParts", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=haberdasher__pb2.Hat.Part, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + function=service.list_parts, + ), + "/i2y.connecpy.example.Haberdasher/DoNothing": Endpoint.unary( + method=MethodInfo( + name="DoNothing", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=google_dot_protobuf_dot_empty__pb2.Empty, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + function=service.do_nothing, + ), + }, + interceptors=interceptors, + read_max_bytes=read_max_bytes, + ) + + @property + def path(self) -> str: + """Returns the URL path to mount the application to when serving multiple applications.""" + return "/i2y.connecpy.example.Haberdasher" + + +class HaberdasherClient(ConnecpyClient): + async def make_hat( + self, + request: haberdasher__pb2.Size, + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + use_get: bool = False, + ) -> haberdasher__pb2.Hat: + return await self.execute_unary( + request=request, + method=MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + headers=headers, + timeout_ms=timeout_ms, + use_get=use_get, + ) + + async def make_flexible_hat( + self, + request: AsyncIterator[haberdasher__pb2.Size], + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + ) -> haberdasher__pb2.Hat: + return await self.execute_client_stream( + request=request, + method=MethodInfo( + name="MakeFlexibleHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + headers=headers, + timeout_ms=timeout_ms, + ) + + def make_similar_hats( + self, + request: haberdasher__pb2.Size, + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + ) -> AsyncIterator[haberdasher__pb2.Hat]: + return self.execute_server_stream( + request=request, + method=MethodInfo( + name="MakeSimilarHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + headers=headers, + timeout_ms=timeout_ms, + ) + + def make_various_hats( + self, + request: AsyncIterator[haberdasher__pb2.Size], + *, + 
headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + ) -> AsyncIterator[haberdasher__pb2.Hat]: + return self.execute_bidi_stream( + request=request, + method=MethodInfo( + name="MakeVariousHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + headers=headers, + timeout_ms=timeout_ms, + ) + + def list_parts( + self, + request: google_dot_protobuf_dot_empty__pb2.Empty, + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + ) -> AsyncIterator[haberdasher__pb2.Hat.Part]: + return self.execute_server_stream( + request=request, + method=MethodInfo( + name="ListParts", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=haberdasher__pb2.Hat.Part, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + headers=headers, + timeout_ms=timeout_ms, + ) + + async def do_nothing( + self, + request: google_dot_protobuf_dot_empty__pb2.Empty, + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + ) -> google_dot_protobuf_dot_empty__pb2.Empty: + return await self.execute_unary( + request=request, + method=MethodInfo( + name="DoNothing", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=google_dot_protobuf_dot_empty__pb2.Empty, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + headers=headers, + timeout_ms=timeout_ms, + ) + + +class HaberdasherSync(Protocol): + """Synchronous service protocol for Haberdasher.""" + + _service_info: ClassVar[dict] = { + "name": "i2y.connecpy.example.Haberdasher", + "methods": { + "make_hat": MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + "make_flexible_hat": MethodInfo( + name="MakeFlexibleHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "make_similar_hats": MethodInfo( + name="MakeSimilarHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + "make_various_hats": MethodInfo( + name="MakeVariousHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "list_parts": MethodInfo( + name="ListParts", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=haberdasher__pb2.Hat.Part, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + "do_nothing": MethodInfo( + name="DoNothing", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=google_dot_protobuf_dot_empty__pb2.Empty, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + }, + } + + def make_hat( + self, request: haberdasher__pb2.Size, ctx: RequestContext + ) -> haberdasher__pb2.Hat: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + def make_flexible_hat( + self, request: Iterator[haberdasher__pb2.Size], ctx: RequestContext + ) -> haberdasher__pb2.Hat: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + def make_similar_hats( + self, 
request: haberdasher__pb2.Size, ctx: RequestContext + ) -> Iterator[haberdasher__pb2.Hat]: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + def make_various_hats( + self, request: Iterator[haberdasher__pb2.Size], ctx: RequestContext + ) -> Iterator[haberdasher__pb2.Hat]: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + def list_parts( + self, request: google_dot_protobuf_dot_empty__pb2.Empty, ctx: RequestContext + ) -> Iterator[haberdasher__pb2.Hat.Part]: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + def do_nothing( + self, request: google_dot_protobuf_dot_empty__pb2.Empty, ctx: RequestContext + ) -> google_dot_protobuf_dot_empty__pb2.Empty: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + +class HaberdasherWSGIApplication(ConnecpyWSGIApplication): + def __init__( + self, + service: HaberdasherSync, + interceptors: Iterable[InterceptorSync] = (), + read_max_bytes: int | None = None, + ) -> None: + super().__init__( + endpoints={ + "/i2y.connecpy.example.Haberdasher/MakeHat": EndpointSync.unary( + method=MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + function=service.make_hat, + ), + "/i2y.connecpy.example.Haberdasher/MakeFlexibleHat": EndpointSync.client_stream( + method=MethodInfo( + name="MakeFlexibleHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + function=service.make_flexible_hat, + ), + "/i2y.connecpy.example.Haberdasher/MakeSimilarHats": EndpointSync.server_stream( + method=MethodInfo( + name="MakeSimilarHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + function=service.make_similar_hats, + ), + "/i2y.connecpy.example.Haberdasher/MakeVariousHats": EndpointSync.bidi_stream( + method=MethodInfo( + name="MakeVariousHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + function=service.make_various_hats, + ), + "/i2y.connecpy.example.Haberdasher/ListParts": EndpointSync.server_stream( + method=MethodInfo( + name="ListParts", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=haberdasher__pb2.Hat.Part, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + function=service.list_parts, + ), + "/i2y.connecpy.example.Haberdasher/DoNothing": EndpointSync.unary( + method=MethodInfo( + name="DoNothing", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=google_dot_protobuf_dot_empty__pb2.Empty, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + function=service.do_nothing, + ), + }, + interceptors=interceptors, + read_max_bytes=read_max_bytes, + ) + + @property + def path(self) -> str: + """Returns the URL path to mount the application to when serving multiple applications.""" + return "/i2y.connecpy.example.Haberdasher" + + +class HaberdasherClientSync(ConnecpyClientSync): + def make_hat( + self, + request: haberdasher__pb2.Size, + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + use_get: bool = False, + ) -> haberdasher__pb2.Hat: + 
return self.execute_unary( + request=request, + method=MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + headers=headers, + timeout_ms=timeout_ms, + use_get=use_get, + ) + + def make_flexible_hat( + self, + request: Iterator[haberdasher__pb2.Size], + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + ) -> haberdasher__pb2.Hat: + return self.execute_client_stream( + request=request, + method=MethodInfo( + name="MakeFlexibleHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + headers=headers, + timeout_ms=timeout_ms, + ) + + def make_similar_hats( + self, + request: haberdasher__pb2.Size, + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + ) -> Iterator[haberdasher__pb2.Hat]: + return self.execute_server_stream( + request=request, + method=MethodInfo( + name="MakeSimilarHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + headers=headers, + timeout_ms=timeout_ms, + ) + + def make_various_hats( + self, + request: Iterator[haberdasher__pb2.Size], + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + ) -> Iterator[haberdasher__pb2.Hat]: + return self.execute_bidi_stream( + request=request, + method=MethodInfo( + name="MakeVariousHats", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher__pb2.Size, + output=haberdasher__pb2.Hat, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + headers=headers, + timeout_ms=timeout_ms, + ) + + def list_parts( + self, + request: google_dot_protobuf_dot_empty__pb2.Empty, + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + ) -> Iterator[haberdasher__pb2.Hat.Part]: + return self.execute_server_stream( + request=request, + method=MethodInfo( + name="ListParts", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=haberdasher__pb2.Hat.Part, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + headers=headers, + timeout_ms=timeout_ms, + ) + + def do_nothing( + self, + request: google_dot_protobuf_dot_empty__pb2.Empty, + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + ) -> google_dot_protobuf_dot_empty__pb2.Empty: + return self.execute_unary( + request=request, + method=MethodInfo( + name="DoNothing", + service_name="i2y.connecpy.example.Haberdasher", + input=google_dot_protobuf_dot_empty__pb2.Empty, + output=google_dot_protobuf_dot_empty__pb2.Empty, + idempotency_level=IdempotencyLevel.UNKNOWN, + ), + headers=headers, + timeout_ms=timeout_ms, + ) diff --git a/test/transport_api/haberdasher_edition_2023_connecpy.py b/test/transport_api/haberdasher_edition_2023_connecpy.py new file mode 100644 index 0000000..daf101d --- /dev/null +++ b/test/transport_api/haberdasher_edition_2023_connecpy.py @@ -0,0 +1,173 @@ +# Generated by https://github.com/i2y/connecpy/v2/protoc-gen-connecpy. DO NOT EDIT! 
+# source: haberdasher_edition_2023.proto + +from collections.abc import Iterable, Mapping +from typing import ClassVar, Protocol + +from connecpy.client import ConnecpyClient, ConnecpyClientSync +from connecpy.code import Code +from connecpy.exceptions import ConnecpyException +from connecpy.interceptor import Interceptor, InterceptorSync +from connecpy.method import IdempotencyLevel, MethodInfo +from connecpy.request import Headers, RequestContext +from connecpy.server import ( + ConnecpyASGIApplication, + ConnecpyWSGIApplication, + Endpoint, + EndpointSync, +) + +from . import haberdasher_edition_2023_pb2 as haberdasher__edition__2023__pb2 + + +class Haberdasher(Protocol): + """Service protocol for Haberdasher.""" + + _service_info: ClassVar[dict] = { + "name": "i2y.connecpy.example2023.Haberdasher", + "methods": { + "make_hat": MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example2023.Haberdasher", + input=haberdasher__edition__2023__pb2.Size, + output=haberdasher__edition__2023__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ) + }, + } + + async def make_hat( + self, request: haberdasher__edition__2023__pb2.Size, ctx: RequestContext + ) -> haberdasher__edition__2023__pb2.Hat: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + +class HaberdasherASGIApplication(ConnecpyASGIApplication): + def __init__( + self, + service: Haberdasher, + *, + interceptors: Iterable[Interceptor] = (), + read_max_bytes: int | None = None, + ) -> None: + super().__init__( + endpoints={ + "/i2y.connecpy.example2023.Haberdasher/MakeHat": Endpoint.unary( + method=MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example2023.Haberdasher", + input=haberdasher__edition__2023__pb2.Size, + output=haberdasher__edition__2023__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + function=service.make_hat, + ) + }, + interceptors=interceptors, + read_max_bytes=read_max_bytes, + ) + + @property + def path(self) -> str: + """Returns the URL path to mount the application to when serving multiple applications.""" + return "/i2y.connecpy.example2023.Haberdasher" + + +class HaberdasherClient(ConnecpyClient): + async def make_hat( + self, + request: haberdasher__edition__2023__pb2.Size, + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + use_get: bool = False, + ) -> haberdasher__edition__2023__pb2.Hat: + return await self.execute_unary( + request=request, + method=MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example2023.Haberdasher", + input=haberdasher__edition__2023__pb2.Size, + output=haberdasher__edition__2023__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + headers=headers, + timeout_ms=timeout_ms, + use_get=use_get, + ) + + +class HaberdasherSync(Protocol): + """Synchronous service protocol for Haberdasher.""" + + _service_info: ClassVar[dict] = { + "name": "i2y.connecpy.example2023.Haberdasher", + "methods": { + "make_hat": MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example2023.Haberdasher", + input=haberdasher__edition__2023__pb2.Size, + output=haberdasher__edition__2023__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ) + }, + } + + def make_hat( + self, request: haberdasher__edition__2023__pb2.Size, ctx: RequestContext + ) -> haberdasher__edition__2023__pb2.Hat: + raise ConnecpyException(Code.UNIMPLEMENTED, "Not implemented") + + +class HaberdasherWSGIApplication(ConnecpyWSGIApplication): + def __init__( + self, + service: 
HaberdasherSync, + interceptors: Iterable[InterceptorSync] = (), + read_max_bytes: int | None = None, + ) -> None: + super().__init__( + endpoints={ + "/i2y.connecpy.example2023.Haberdasher/MakeHat": EndpointSync.unary( + method=MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example2023.Haberdasher", + input=haberdasher__edition__2023__pb2.Size, + output=haberdasher__edition__2023__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + function=service.make_hat, + ) + }, + interceptors=interceptors, + read_max_bytes=read_max_bytes, + ) + + @property + def path(self) -> str: + """Returns the URL path to mount the application to when serving multiple applications.""" + return "/i2y.connecpy.example2023.Haberdasher" + + +class HaberdasherClientSync(ConnecpyClientSync): + def make_hat( + self, + request: haberdasher__edition__2023__pb2.Size, + *, + headers: Headers | Mapping[str, str] | None = None, + timeout_ms: int | None = None, + use_get: bool = False, + ) -> haberdasher__edition__2023__pb2.Hat: + return self.execute_unary( + request=request, + method=MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example2023.Haberdasher", + input=haberdasher__edition__2023__pb2.Size, + output=haberdasher__edition__2023__pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ), + headers=headers, + timeout_ms=timeout_ms, + use_get=use_get, + ) diff --git a/test/transport_api/haberdasher_edition_2023_pb2.py b/test/transport_api/haberdasher_edition_2023_pb2.py new file mode 100644 index 0000000..7cceeb6 --- /dev/null +++ b/test/transport_api/haberdasher_edition_2023_pb2.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: haberdasher_edition_2023.proto +# Protobuf Python Version: 5.29.3 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'haberdasher_edition_2023.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ehaberdasher_edition_2023.proto\x12\x18i2y.connecpy.example2023\"0\n\x03Hat\x12\x0c\n\x04size\x18\x01 \x01(\x05\x12\r\n\x05\x63olor\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\"+\n\x04Size\x12\x0e\n\x06inches\x18\x01 \x01(\x05\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t2\\\n\x0bHaberdasher\x12M\n\x07MakeHat\x12\x1e.i2y.connecpy.example2023.Size\x1a\x1d.i2y.connecpy.example2023.Hat\"\x03\x90\x02\x01\x42\rZ\x0b\x65xample2023b\x08\x65\x64itionsp\xe8\x07') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'haberdasher_edition_2023_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'Z\013example2023' + _globals['_HABERDASHER'].methods_by_name['MakeHat']._loaded_options = None + _globals['_HABERDASHER'].methods_by_name['MakeHat']._serialized_options = b'\220\002\001' + _globals['_HAT']._serialized_start=60 + _globals['_HAT']._serialized_end=108 + 
_globals['_SIZE']._serialized_start=110 + _globals['_SIZE']._serialized_end=153 + _globals['_HABERDASHER']._serialized_start=155 + _globals['_HABERDASHER']._serialized_end=247 +# @@protoc_insertion_point(module_scope) diff --git a/test/transport_api/haberdasher_edition_2023_pb2.pyi b/test/transport_api/haberdasher_edition_2023_pb2.pyi new file mode 100644 index 0000000..cd4e604 --- /dev/null +++ b/test/transport_api/haberdasher_edition_2023_pb2.pyi @@ -0,0 +1,23 @@ +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Optional as _Optional + +DESCRIPTOR: _descriptor.FileDescriptor + +class Hat(_message.Message): + __slots__ = ("size", "color", "name") + SIZE_FIELD_NUMBER: _ClassVar[int] + COLOR_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + size: int + color: str + name: str + def __init__(self, size: _Optional[int] = ..., color: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... + +class Size(_message.Message): + __slots__ = ("inches", "description") + INCHES_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + inches: int + description: str + def __init__(self, inches: _Optional[int] = ..., description: _Optional[str] = ...) -> None: ... diff --git a/test/transport_api/haberdasher_pb2.py b/test/transport_api/haberdasher_pb2.py new file mode 100644 index 0000000..060da2a --- /dev/null +++ b/test/transport_api/haberdasher_pb2.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: haberdasher.proto +# Protobuf Python Version: 5.29.3 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'haberdasher.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11haberdasher.proto\x12\x14i2y.connecpy.example\x1a\x1bgoogle/protobuf/empty.proto\"R\n\x03Hat\x12\x0c\n\x04size\x18\x01 \x01(\x05\x12\r\n\x05\x63olor\x18\x02 \x01(\t\x12\x11\n\x04name\x18\x03 \x01(\tH\x00\x88\x01\x01\x1a\x12\n\x04Part\x12\n\n\x02id\x18\x01 \x01(\tB\x07\n\x05_name\"+\n\x04Size\x12\x0e\n\x06inches\x18\x01 \x01(\x05\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t2\xc9\x03\n\x0bHaberdasher\x12\x45\n\x07MakeHat\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x03\x90\x02\x01\x12L\n\x0fMakeFlexibleHat\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x00(\x01\x12O\n\x0fMakeSimilarHats\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x03\x90\x02\x01\x30\x01\x12N\n\x0fMakeVariousHats\x12\x1a.i2y.connecpy.example.Size\x1a\x19.i2y.connecpy.example.Hat\"\x00(\x01\x30\x01\x12G\n\tListParts\x12\x16.google.protobuf.Empty\x1a\x1e.i2y.connecpy.example.Hat.Part\"\x00\x30\x01\x12;\n\tDoNothing\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.EmptyB\tZ\x07\x65xampleb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'haberdasher_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'Z\007example' + _globals['_HABERDASHER'].methods_by_name['MakeHat']._loaded_options = None + _globals['_HABERDASHER'].methods_by_name['MakeHat']._serialized_options = b'\220\002\001' + _globals['_HABERDASHER'].methods_by_name['MakeSimilarHats']._loaded_options = None + _globals['_HABERDASHER'].methods_by_name['MakeSimilarHats']._serialized_options = b'\220\002\001' + _globals['_HAT']._serialized_start=72 + _globals['_HAT']._serialized_end=154 + _globals['_HAT_PART']._serialized_start=127 + _globals['_HAT_PART']._serialized_end=145 + _globals['_SIZE']._serialized_start=156 + _globals['_SIZE']._serialized_end=199 + _globals['_HABERDASHER']._serialized_start=202 + _globals['_HABERDASHER']._serialized_end=659 +# @@protoc_insertion_point(module_scope) diff --git a/test/transport_api/haberdasher_pb2.pyi b/test/transport_api/haberdasher_pb2.pyi new file mode 100644 index 0000000..f0a7c71 --- /dev/null +++ b/test/transport_api/haberdasher_pb2.pyi @@ -0,0 +1,29 @@ +from google.protobuf import empty_pb2 as _empty_pb2 +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Optional as _Optional + +DESCRIPTOR: _descriptor.FileDescriptor + +class Hat(_message.Message): + __slots__ = ("size", "color", "name") + class Part(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + SIZE_FIELD_NUMBER: _ClassVar[int] + COLOR_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + size: int + color: str + name: str + def __init__(self, size: _Optional[int] = ..., color: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... + +class Size(_message.Message): + __slots__ = ("inches", "description") + INCHES_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + inches: int + description: str + def __init__(self, inches: _Optional[int] = ..., description: _Optional[str] = ...) -> None: ... 
diff --git a/test/transport_api/proto b/test/transport_api/proto new file mode 120000 index 0000000..5c8d352 --- /dev/null +++ b/test/transport_api/proto @@ -0,0 +1 @@ +../proto \ No newline at end of file diff --git a/test/transport_api/test_error_handling.py b/test/transport_api/test_error_handling.py new file mode 100644 index 0000000..1a4201f --- /dev/null +++ b/test/transport_api/test_error_handling.py @@ -0,0 +1,368 @@ +"""Test error handling across Connect and gRPC transports.""" + +import unittest +from unittest.mock import MagicMock, Mock, patch + +import httpx +import pytest + +from connecpy.code import Code +from connecpy.exceptions import ConnecpyException +from connecpy.method import IdempotencyLevel, MethodInfo +from connecpy.transport.client import CallOptions, ConnectTransport, GrpcTransport + + +class TestGrpcErrorHandling(unittest.TestCase): + """Test error handling in GrpcTransport.""" + + @patch("connecpy.transport.client.grpc.GRPC_AVAILABLE", True) + @patch("connecpy.transport.client.grpc.grpc") + def test_unary_unary_error_without_retry(self, mock_grpc): + """Test that unary_unary properly converts gRPC errors without retry.""" + # Setup mock channel and stub + mock_channel = MagicMock() + mock_grpc.insecure_channel.return_value = mock_channel + + # Create a custom RpcError class and instance + class MockRpcError(Exception): + def __init__(self): + super().__init__() + self.code = lambda: mock_grpc.StatusCode.UNAVAILABLE + self.details = lambda: "Service unavailable" + + # Setup the mock StatusCode enum + mock_grpc.StatusCode.UNAVAILABLE = "UNAVAILABLE" + mock_grpc.RpcError = MockRpcError + + # Setup the stub to raise an error + mock_stub = MagicMock(side_effect=MockRpcError()) + mock_channel.unary_unary.return_value = mock_stub + + # Create transport + transport = GrpcTransport("localhost:50051") + + # Create test method info + method = MethodInfo( + name="TestMethod", + service_name="TestService", + input=type("TestInput", (), {"SerializeToString": lambda _: b"test"}), + output=type( + "TestOutput", (), {"FromString": classmethod(lambda _, _x: {})} + ), + idempotency_level=IdempotencyLevel.UNKNOWN, + ) + + # Test that error is properly converted + request = Mock() + request.SerializeToString = lambda: b"test" + + with pytest.raises(ConnecpyException) as context: + transport.unary_unary(method, request, CallOptions()) + + # Verify the exception has correct code + assert context.value.code == Code.UNAVAILABLE + assert "Service unavailable" in str(context.value) + + @patch("connecpy.transport.client.grpc.GRPC_AVAILABLE", True) + @patch("connecpy.transport.client.grpc.grpc") + def test_stream_unary_error_without_retry(self, mock_grpc): + """Test that stream_unary properly converts gRPC errors without retry.""" + # Setup mock channel and stub + mock_channel = MagicMock() + mock_grpc.insecure_channel.return_value = mock_channel + + # Create a custom RpcError class and instance + class MockRpcError(Exception): + def __init__(self): + super().__init__() + self.code = lambda: mock_grpc.StatusCode.DEADLINE_EXCEEDED + self.details = lambda: "Deadline exceeded" + + # Setup the mock StatusCode enum + mock_grpc.StatusCode.DEADLINE_EXCEEDED = "DEADLINE_EXCEEDED" + mock_grpc.RpcError = MockRpcError + + # Setup the stub to raise an error + mock_stub = MagicMock(side_effect=MockRpcError()) + mock_channel.stream_unary.return_value = mock_stub + + # Create transport + transport = GrpcTransport("localhost:50051") + + # Create test method info + method = MethodInfo( + name="TestMethod", 
+ service_name="TestService", + input=type("TestInput", (), {"SerializeToString": lambda _: b"test"}), + output=type( + "TestOutput", (), {"FromString": classmethod(lambda _, _x: {})} + ), + idempotency_level=IdempotencyLevel.UNKNOWN, + ) + + # Create a mock stream + mock_stream = iter([Mock(SerializeToString=lambda: b"test")]) + + with pytest.raises(ConnecpyException) as context: + transport.stream_unary(method, mock_stream, CallOptions()) + + # Verify the exception has correct code + assert context.value.code == Code.DEADLINE_EXCEEDED + assert "Deadline exceeded" in str(context.value) + + @patch("connecpy.transport.client.grpc.GRPC_AVAILABLE", True) + @patch("connecpy.transport.client.grpc.grpc") + def test_grpc_status_code_mapping(self, mock_grpc): + """Test that all gRPC status codes are properly mapped.""" + mock_channel = MagicMock() + mock_grpc.insecure_channel.return_value = mock_channel + + # Setup mock StatusCode enum values - must match exactly what's in grpc.py + mock_grpc.StatusCode.CANCELLED = MagicMock() + mock_grpc.StatusCode.UNKNOWN = MagicMock() + mock_grpc.StatusCode.INVALID_ARGUMENT = MagicMock() + mock_grpc.StatusCode.DEADLINE_EXCEEDED = MagicMock() + mock_grpc.StatusCode.NOT_FOUND = MagicMock() + mock_grpc.StatusCode.ALREADY_EXISTS = MagicMock() + mock_grpc.StatusCode.PERMISSION_DENIED = MagicMock() + mock_grpc.StatusCode.RESOURCE_EXHAUSTED = MagicMock() + mock_grpc.StatusCode.FAILED_PRECONDITION = MagicMock() + mock_grpc.StatusCode.ABORTED = MagicMock() + mock_grpc.StatusCode.OUT_OF_RANGE = MagicMock() + mock_grpc.StatusCode.UNIMPLEMENTED = MagicMock() + mock_grpc.StatusCode.INTERNAL = MagicMock() + mock_grpc.StatusCode.UNAVAILABLE = MagicMock() + mock_grpc.StatusCode.DATA_LOSS = MagicMock() + mock_grpc.StatusCode.UNAUTHENTICATED = MagicMock() + + transport = GrpcTransport("localhost:50051") + + # Test status code mappings + status_mappings = [ + (mock_grpc.StatusCode.CANCELLED, Code.CANCELED), + (mock_grpc.StatusCode.UNKNOWN, Code.UNKNOWN), + (mock_grpc.StatusCode.INVALID_ARGUMENT, Code.INVALID_ARGUMENT), + (mock_grpc.StatusCode.DEADLINE_EXCEEDED, Code.DEADLINE_EXCEEDED), + (mock_grpc.StatusCode.NOT_FOUND, Code.NOT_FOUND), + (mock_grpc.StatusCode.ALREADY_EXISTS, Code.ALREADY_EXISTS), + (mock_grpc.StatusCode.PERMISSION_DENIED, Code.PERMISSION_DENIED), + (mock_grpc.StatusCode.RESOURCE_EXHAUSTED, Code.RESOURCE_EXHAUSTED), + (mock_grpc.StatusCode.FAILED_PRECONDITION, Code.FAILED_PRECONDITION), + (mock_grpc.StatusCode.ABORTED, Code.ABORTED), + (mock_grpc.StatusCode.OUT_OF_RANGE, Code.OUT_OF_RANGE), + (mock_grpc.StatusCode.UNIMPLEMENTED, Code.UNIMPLEMENTED), + (mock_grpc.StatusCode.INTERNAL, Code.INTERNAL), + (mock_grpc.StatusCode.UNAVAILABLE, Code.UNAVAILABLE), + (mock_grpc.StatusCode.DATA_LOSS, Code.DATA_LOSS), + (mock_grpc.StatusCode.UNAUTHENTICATED, Code.UNAUTHENTICATED), + ] + + for grpc_status, expected_code in status_mappings: + result = transport._grpc_status_to_code(grpc_status) + assert result == expected_code + + +class TestConnectErrorHandling(unittest.TestCase): + """Test error handling in ConnectTransport.""" + + @patch("connecpy.transport.client.connect.ConnecpyClientSync") + def test_unary_timeout_error(self, mock_client_class): + """Test that timeout errors are properly converted to DEADLINE_EXCEEDED.""" + # Setup mock client + mock_client = MagicMock() + mock_client_class.return_value = mock_client + + # Make execute_unary raise a timeout exception + mock_client.execute_unary.side_effect = httpx.TimeoutException( + "Request timeout" + ) + + transport = 
ConnectTransport("http://localhost:3000") + + # Create test method info + method = MethodInfo( + name="TestMethod", + service_name="TestService", + input=type("TestInput", (), {}), + output=type("TestOutput", (), {}), + idempotency_level=IdempotencyLevel.UNKNOWN, + ) + + request = {"test": "data"} + call_options = CallOptions(timeout_ms=1000) + + with pytest.raises(ConnecpyException) as context: + transport.unary_unary(method, request, call_options) + + # Verify the exception has correct code + assert context.value.code == Code.DEADLINE_EXCEEDED + assert "timeout" in str(context.value).lower() + + @patch("connecpy.transport.client.connect.ConnecpyClientSync") + def test_server_stream_timeout_error(self, mock_client_class): + """Test that server stream timeout errors are properly converted.""" + # Setup mock client + mock_client = MagicMock() + mock_client_class.return_value = mock_client + + # Make execute_server_stream raise a timeout exception + mock_client.execute_server_stream.side_effect = httpx.TimeoutException( + "Stream timeout" + ) + + transport = ConnectTransport("http://localhost:3000") + + # Create test method info + method = MethodInfo( + name="TestMethod", + service_name="TestService", + input=type("TestInput", (), {}), + output=type("TestOutput", (), {}), + idempotency_level=IdempotencyLevel.UNKNOWN, + ) + + request = {"test": "data"} + call_options = CallOptions() + + with pytest.raises(ConnecpyException) as context: + # Since unary_stream returns an iterator, we need to trigger the actual call + transport.unary_stream(method, request, call_options) + # The actual exception would be raised when we consume the iterator + # but in our mock setup, it raises immediately + + assert context.value.code == Code.DEADLINE_EXCEEDED + assert "timeout" in str(context.value).lower() + + @patch("connecpy.transport.client.connect.ConnecpyClientSync") + def test_client_stream_timeout_error(self, mock_client_class): + """Test that client stream timeout errors are properly converted.""" + # Setup mock client + mock_client = MagicMock() + mock_client_class.return_value = mock_client + + # Make execute_client_stream raise a timeout exception + mock_client.execute_client_stream.side_effect = httpx.TimeoutException( + "Client stream timeout" + ) + + transport = ConnectTransport("http://localhost:3000") + + # Create test method info + method = MethodInfo( + name="TestMethod", + service_name="TestService", + input=type("TestInput", (), {}), + output=type("TestOutput", (), {}), + idempotency_level=IdempotencyLevel.UNKNOWN, + ) + + stream = iter([{"test": "data1"}, {"test": "data2"}]) + call_options = CallOptions() + + with pytest.raises(ConnecpyException) as context: + transport.stream_unary(method, stream, call_options) + + assert context.value.code == Code.DEADLINE_EXCEEDED + assert "timeout" in str(context.value).lower() + + @patch("connecpy.transport.client.connect.ConnecpyClientSync") + def test_bidi_stream_timeout_error(self, mock_client_class): + """Test that bidirectional stream timeout errors are properly converted.""" + # Setup mock client + mock_client = MagicMock() + mock_client_class.return_value = mock_client + + # Make execute_bidi_stream raise a timeout exception + mock_client.execute_bidi_stream.side_effect = httpx.TimeoutException( + "Bidi stream timeout" + ) + + transport = ConnectTransport("http://localhost:3000") + + # Create test method info + method = MethodInfo( + name="TestMethod", + service_name="TestService", + input=type("TestInput", (), {}), + output=type("TestOutput", 
(), {}), + idempotency_level=IdempotencyLevel.UNKNOWN, + ) + + stream = iter([{"test": "data1"}, {"test": "data2"}]) + call_options = CallOptions() + + with pytest.raises(ConnecpyException) as context: + # Since stream_stream returns an iterator, we need to trigger the actual call + transport.stream_stream(method, stream, call_options) + + assert context.value.code == Code.DEADLINE_EXCEEDED + assert "timeout" in str(context.value).lower() + + +class TestConsistentErrorHandling(unittest.TestCase): + """Test that errors are handled consistently across transports.""" + + @patch("connecpy.transport.client.grpc.GRPC_AVAILABLE", True) + @patch("connecpy.transport.client.grpc.grpc") + @patch("connecpy.transport.client.connect.ConnecpyClientSync") + def test_timeout_error_consistency(self, mock_connect_client, mock_grpc): + """Test that both transports handle timeout errors consistently.""" + # Setup gRPC transport to raise DEADLINE_EXCEEDED + mock_grpc_channel = MagicMock() + mock_grpc.insecure_channel.return_value = mock_grpc_channel + + # Create a custom RpcError class and instance + class MockRpcError(Exception): + def __init__(self): + super().__init__() + self.code = lambda: mock_grpc.StatusCode.DEADLINE_EXCEEDED + self.details = lambda: "Deadline exceeded" + + # Setup the mock StatusCode enum + mock_grpc.StatusCode.DEADLINE_EXCEEDED = MagicMock() + mock_grpc.RpcError = MockRpcError + + mock_grpc_stub = MagicMock(side_effect=MockRpcError()) + mock_grpc_channel.unary_unary.return_value = mock_grpc_stub + + # Setup Connect transport to raise timeout + mock_connect = MagicMock() + mock_connect_client.return_value = mock_connect + mock_connect.execute_unary.side_effect = httpx.TimeoutException("Timeout") + + # Create both transports + grpc_transport = GrpcTransport("localhost:50051") + connect_transport = ConnectTransport("http://localhost:3000") + + # Create test method info + method = MethodInfo( + name="TestMethod", + service_name="TestService", + input=type("TestInput", (), {"SerializeToString": lambda _: b"test"}), + output=type( + "TestOutput", (), {"FromString": classmethod(lambda _, _x: {})} + ), + idempotency_level=IdempotencyLevel.UNKNOWN, + ) + + request = Mock(SerializeToString=lambda: b"test") + + # Test gRPC transport + with pytest.raises(ConnecpyException) as grpc_context: + grpc_transport.unary_unary(method, request, CallOptions()) + + # Test Connect transport + with pytest.raises(ConnecpyException) as connect_context: + connect_transport.unary_unary(method, request, CallOptions()) + + # Both should have the same error code + assert grpc_context.value.code == Code.DEADLINE_EXCEEDED + assert connect_context.value.code == Code.DEADLINE_EXCEEDED + + # Both should be ConnecpyException + assert isinstance(grpc_context.value, ConnecpyException) + assert isinstance(connect_context.value, ConnecpyException) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/transport_api/test_generated_transport_api.py b/test/transport_api/test_generated_transport_api.py new file mode 100644 index 0000000..137c59a --- /dev/null +++ b/test/transport_api/test_generated_transport_api.py @@ -0,0 +1,176 @@ +"""Test for the generated Transport API code (requires transport_api=true in code generation).""" + +import subprocess +import sys +import tempfile +from pathlib import Path + +import pytest + + +@pytest.mark.skipif( + sys.platform == "win32", reason="Test uses Unix-specific protoc invocation" +) +def test_generated_transport_api(): + """Test that generated Transport API code works 
correctly.""" + + # Check if protoc is available + try: + subprocess.run(["protoc", "--version"], check=True, capture_output=True) + except (FileNotFoundError, subprocess.CalledProcessError): + pytest.skip("protoc not available") + + # Create a minimal proto file for testing + proto_content = """ +syntax = "proto3"; +package test; + +message Request { + string data = 1; +} + +message Response { + string result = 1; +} + +service TestService { + rpc TestMethod(Request) returns (Response); +} +""" + + with tempfile.TemporaryDirectory() as tmpdir: + tmpdir_path = Path(tmpdir) + + # Write proto file + proto_file = tmpdir_path / "test.proto" + proto_file.write_text(proto_content) + + # Build the plugin first + plugin_dir = Path(__file__).parent.parent.parent / "protoc-gen-connecpy" + build_result = subprocess.run( + ["go", "build", "-o", str(tmpdir_path / "protoc-gen-connecpy")], + check=False, + cwd=plugin_dir, + capture_output=True, + text=True, + ) + + if build_result.returncode != 0: + pytest.skip(f"Failed to build plugin: {build_result.stderr}") + + # Generate code with Transport API enabled + result = subprocess.run( + [ + "protoc", + f"--plugin=protoc-gen-connecpy={tmpdir_path / 'protoc-gen-connecpy'}", + f"--connecpy_out=transport_api=true:{tmpdir}", + f"--python_out={tmpdir}", + f"-I{tmpdir}", + str(proto_file), + ], + check=False, + capture_output=True, + text=True, + ) + + if result.returncode != 0: + pytest.skip(f"protoc failed: {result.stderr}") + + # Check that generated file contains Transport API code + generated_file = tmpdir_path / "test_connecpy.py" + assert generated_file.exists(), "Generated file not found" + + content = generated_file.read_text() + + # Check for Transport API specific code + assert "class TestServiceClientProtocol(Protocol):" in content + assert "class TestServiceClientSyncProtocol(Protocol):" in content + assert "def create_client(" in content + assert "def create_client_sync(" in content + assert ( + "from connecpy.transport.client.connect_async import ConnectTransportAsync" + in content + ) + assert ( + "from connecpy.transport.client.grpc_async import GrpcTransportAsync" + in content + ) + + # Verify the imports are properly configured + assert "# noqa: PLC0415" in content # Import suppression for late imports + + +def test_transport_api_not_generated_by_default(): + """Test that Transport API is NOT generated without transport_api=true.""" + + # Check if protoc is available + try: + subprocess.run(["protoc", "--version"], check=True, capture_output=True) + except (FileNotFoundError, subprocess.CalledProcessError): + pytest.skip("protoc not available") + + proto_content = """ +syntax = "proto3"; +package test; + +message Request { + string data = 1; +} + +message Response { + string result = 1; +} + +service TestService { + rpc TestMethod(Request) returns (Response); +} +""" + + with tempfile.TemporaryDirectory() as tmpdir: + tmpdir_path = Path(tmpdir) + + # Write proto file + proto_file = tmpdir_path / "test.proto" + proto_file.write_text(proto_content) + + # Build the plugin first + plugin_dir = Path(__file__).parent.parent.parent / "protoc-gen-connecpy" + build_result = subprocess.run( + ["go", "build", "-o", str(tmpdir_path / "protoc-gen-connecpy")], + check=False, + cwd=plugin_dir, + capture_output=True, + text=True, + ) + + if build_result.returncode != 0: + pytest.skip(f"Failed to build plugin: {build_result.stderr}") + + # Generate code WITHOUT Transport API enabled + result = subprocess.run( + [ + "protoc", + 
f"--plugin=protoc-gen-connecpy={tmpdir_path / 'protoc-gen-connecpy'}", + f"--connecpy_out={tmpdir}", + f"--python_out={tmpdir}", + f"-I{tmpdir}", + str(proto_file), + ], + check=False, + capture_output=True, + text=True, + ) + + if result.returncode != 0: + pytest.skip(f"protoc failed: {result.stderr}") + + # Check that generated file does NOT contain Transport API code + generated_file = tmpdir_path / "test_connecpy.py" + assert generated_file.exists(), "Generated file not found" + + content = generated_file.read_text() + + # Check that Transport API specific code is NOT present + assert "class TestServiceClientProtocol(Protocol):" not in content + assert "def create_client(" not in content + assert "from connecpy.transport.client" not in content diff --git a/test/transport_api/test_grpc_transport_integration.py b/test/transport_api/test_grpc_transport_integration.py new file mode 100644 index 0000000..02d8f4f --- /dev/null +++ b/test/transport_api/test_grpc_transport_integration.py @@ -0,0 +1,77 @@ +"""Integration test for gRPC transport.""" + +from concurrent import futures + +import pytest + +try: + import grpc # type: ignore[import-untyped] +except ImportError: + pytest.skip("grpc not available", allow_module_level=True) + +from connecpy.method import IdempotencyLevel, MethodInfo +from connecpy.transport.client import GrpcTransport + +from . import haberdasher_pb2 + +try: + from . import haberdasher_pb2_grpc # type: ignore[import-not-found] +except ImportError: + # haberdasher_pb2_grpc might not be generated, create a minimal stub + class HaberdasherServicer: + """Minimal servicer stub.""" + + def MakeHat(self, request, context): # noqa: N802 + pass + + haberdasher_pb2_grpc = type( + "Module", + (), + { + "HaberdasherServicer": HaberdasherServicer, + "add_HaberdasherServicer_to_server": lambda _service, _server: None, + }, + ) + + +class SimpleHaberdasherService(haberdasher_pb2_grpc.HaberdasherServicer): # type: ignore[name-defined] + """Simple test service.""" + + def MakeHat(self, request, context): # noqa: N802 + return haberdasher_pb2.Hat( + size=request.inches, color="test-color", name="test-hat" + ) + + +@pytest.fixture +def grpc_server(): + """Start a gRPC server for testing.""" + server = grpc.server(futures.ThreadPoolExecutor(max_workers=5)) + service = SimpleHaberdasherService() + haberdasher_pb2_grpc.add_HaberdasherServicer_to_server(service, server) # type: ignore[attr-defined] + port = server.add_insecure_port("[::]:0") + server.start() + yield port + server.stop(grace=0) + + +def test_grpc_transport_basic(grpc_server): + """Test basic gRPC transport functionality.""" + transport = GrpcTransport(f"localhost:{grpc_server}") + + method = MethodInfo( + name="MakeHat", + service_name="i2y.connecpy.example.Haberdasher", + input=haberdasher_pb2.Size, + output=haberdasher_pb2.Hat, + idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS, + ) + + request = haberdasher_pb2.Size(inches=10, description="Test") + response = transport.unary_unary(method, request) + + assert response.size == 10 + assert response.color == "test-color" + assert response.name == "test-hat" + + transport.close() diff --git a/test/transport_api/test_grpc_transport_streaming.py b/test/transport_api/test_grpc_transport_streaming.py new file mode 100644 index 0000000..80f2e02 --- /dev/null +++ b/test/transport_api/test_grpc_transport_streaming.py @@ -0,0 +1,345 @@ +"""Integration tests for gRPC transport with streaming RPCs.""" + +from concurrent import futures +from unittest.mock import patch + +import pytest + 
+try:
+    import grpc  # type: ignore[import-untyped]
+    import grpc.aio  # type: ignore[import-untyped]
+except ImportError:
+    pytest.skip("grpc not available", allow_module_level=True)
+
+from connecpy.method import IdempotencyLevel, MethodInfo
+from connecpy.transport.client import GrpcTransport, GrpcTransportAsync
+
+from . import haberdasher_pb2
+
+try:
+    from . import haberdasher_pb2_grpc  # type: ignore[import-not-found]
+except ImportError:
+    # haberdasher_pb2_grpc might not be generated, create a minimal stub
+    class HaberdasherServicer:
+        """Minimal servicer stub."""
+
+    haberdasher_pb2_grpc = type(
+        "Module",
+        (),
+        {
+            "HaberdasherServicer": HaberdasherServicer,
+            "add_HaberdasherServicer_to_server": lambda _service, _server: None,
+        },
+    )
+
+
+class StreamingHaberdasherService(haberdasher_pb2_grpc.HaberdasherServicer):  # type: ignore[name-defined]
+    """Test service with streaming methods."""
+
+    def MakeHat(self, request, context):  # noqa: N802
+        """Unary-unary RPC."""
+        return haberdasher_pb2.Hat(
+            size=request.inches, color="test-color", name="test-hat"
+        )
+
+    def StreamHats(self, request, context):  # noqa: N802
+        """Unary-stream RPC - Returns multiple hats for one size."""
+        colors = ["red", "blue", "green"]
+        for color in colors:
+            yield haberdasher_pb2.Hat(
+                size=request.inches, color=color, name=f"{color}-hat"
+            )
+
+    def MakeHatFromSizes(self, request_iterator, context):  # noqa: N802
+        """Stream-unary RPC - Takes multiple sizes and returns one hat."""
+        total_size = 0
+        count = 0
+        for size_request in request_iterator:
+            total_size += size_request.inches
+            count += 1
+        avg_size = total_size // count if count > 0 else 0
+        return haberdasher_pb2.Hat(
+            size=avg_size, color="averaged", name=f"avg-{avg_size}-hat"
+        )
+
+    def StreamToStream(self, request_iterator, context):  # noqa: N802
+        """Stream-stream RPC - Transforms each input size to a hat."""
+        for size_request in request_iterator:
+            yield haberdasher_pb2.Hat(
+                size=size_request.inches,
+                color=f"color-{size_request.inches}",
+                name=f"hat-{size_request.inches}",
+            )
+
+
+@pytest.fixture
+def grpc_server():
+    """Start a gRPC server with streaming support for testing."""
+    server = grpc.server(futures.ThreadPoolExecutor(max_workers=5))
+    service = StreamingHaberdasherService()
+    haberdasher_pb2_grpc.add_HaberdasherServicer_to_server(service, server)  # type: ignore[attr-defined]
+    port = server.add_insecure_port("[::]:0")
+    server.start()
+    yield port
+    server.stop(grace=0)
+
+
+def test_grpc_unary_stream(grpc_server):
+    """Test unary-stream RPC with gRPC transport."""
+    transport = GrpcTransport(f"localhost:{grpc_server}")
+
+    # Mock the method to return stream output type
+    method = MethodInfo(
+        name="StreamHats",
+        service_name="i2y.connecpy.example.Haberdasher",
+        input=haberdasher_pb2.Size,
+        output=haberdasher_pb2.Hat,
+        idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+    )
+
+    # Since the actual service doesn't have StreamHats, we need to mock it
+    with patch.object(transport, "_get_or_create_stub") as mock_stub:
+        # Create a mock that returns an iterator
+        def mock_stream_call(request, metadata=None, timeout=None):  # noqa: ARG001
+            colors = ["red", "blue", "green"]
+            for color in colors:
+                yield haberdasher_pb2.Hat(
+                    size=request.inches, color=color, name=f"{color}-hat"
+                )
+
+        mock_stub.return_value = mock_stream_call
+
+        request = haberdasher_pb2.Size(inches=12, description="Test")
+        response_stream = transport.unary_stream(method, request)
+
+        # Consume the stream
+        hats = list(response_stream)
+
+        assert len(hats) == 3
+        assert hats[0].color == "red"
+        assert hats[1].color == "blue"
+        assert hats[2].color == "green"
+        assert all(hat.size == 12 for hat in hats)
+
+    transport.close()
+
+
+def test_grpc_stream_unary(grpc_server):
+    """Test stream-unary RPC with gRPC transport."""
+    transport = GrpcTransport(f"localhost:{grpc_server}")
+
+    method = MethodInfo(
+        name="MakeHatFromSizes",
+        service_name="i2y.connecpy.example.Haberdasher",
+        input=haberdasher_pb2.Size,
+        output=haberdasher_pb2.Hat,
+        idempotency_level=IdempotencyLevel.UNKNOWN,
+    )
+
+    # Mock the stub to handle stream input
+    with patch.object(transport, "_get_or_create_stub") as mock_stub:
+
+        def mock_stream_unary_call(request_stream, metadata=None, timeout=None):  # noqa: ARG001
+            total_size = 0
+            count = 0
+            for size_request in request_stream:
+                total_size += size_request.inches
+                count += 1
+            avg_size = total_size // count if count > 0 else 0
+            return haberdasher_pb2.Hat(
+                size=avg_size, color="averaged", name=f"avg-{avg_size}-hat"
+            )
+
+        mock_stub.return_value = mock_stream_unary_call
+
+        # Create a stream of sizes
+        def size_stream():
+            for inches in [10, 12, 14]:
+                yield haberdasher_pb2.Size(inches=inches, description=f"Size {inches}")
+
+        response = transport.stream_unary(method, size_stream())
+
+        assert response.size == 12  # Average of 10, 12, 14
+        assert response.color == "averaged"
+        assert response.name == "avg-12-hat"
+
+    transport.close()
+
+
+def test_grpc_stream_stream(grpc_server):
+    """Test stream-stream RPC with gRPC transport."""
+    transport = GrpcTransport(f"localhost:{grpc_server}")
+
+    method = MethodInfo(
+        name="StreamToStream",
+        service_name="i2y.connecpy.example.Haberdasher",
+        input=haberdasher_pb2.Size,
+        output=haberdasher_pb2.Hat,
+        idempotency_level=IdempotencyLevel.UNKNOWN,
+    )
+
+    # Mock the stub to handle bidirectional streaming
+    with patch.object(transport, "_get_or_create_stub") as mock_stub:
+
+        def mock_stream_stream_call(request_stream, metadata=None, timeout=None):  # noqa: ARG001
+            for size_request in request_stream:
+                yield haberdasher_pb2.Hat(
+                    size=size_request.inches,
+                    color=f"color-{size_request.inches}",
+                    name=f"hat-{size_request.inches}",
+                )
+
+        mock_stub.return_value = mock_stream_stream_call
+
+        # Create a stream of sizes
+        def size_stream():
+            for inches in [8, 10, 12]:
+                yield haberdasher_pb2.Size(inches=inches, description=f"Size {inches}")
+
+        response_stream = transport.stream_stream(method, size_stream())
+
+        # Consume the response stream
+        hats = list(response_stream)
+
+        assert len(hats) == 3
+        assert hats[0].size == 8
+        assert hats[0].color == "color-8"
+        assert hats[1].size == 10
+        assert hats[1].color == "color-10"
+        assert hats[2].size == 12
+        assert hats[2].color == "color-12"
+
+    transport.close()
+
+
+@pytest.mark.asyncio
+async def test_grpc_async_unary_stream(grpc_server):
+    """Test async unary-stream RPC with gRPC transport."""
+    transport = GrpcTransportAsync(f"localhost:{grpc_server}")
+
+    method = MethodInfo(
+        name="StreamHats",
+        service_name="i2y.connecpy.example.Haberdasher",
+        input=haberdasher_pb2.Size,
+        output=haberdasher_pb2.Hat,
+        idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+    )
+
+    # Mock the async stub
+    with patch.object(transport, "_get_or_create_stub") as mock_stub:
+
+        async def async_stream():
+            colors = ["red", "blue", "green"]
+            for color in colors:
+                yield haberdasher_pb2.Hat(size=12, color=color, name=f"{color}-hat")
+
+        def mock_stream_call(request, metadata=None, timeout=None):  # noqa: ARG001
+            return async_stream()
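+
+        # Note: the stub call itself is synchronous and hands back an async
+        # iterator, which is why mock_stream_call is a plain function rather
+        # than a coroutine.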
+
+        mock_stub.return_value = mock_stream_call
+
+        request = haberdasher_pb2.Size(inches=12, description="Test")
+        response_stream = transport.unary_stream(method, request)
+
+        # Consume the async stream
+        hats = []
+        async for hat in response_stream:
+            hats.append(hat)
+
+        assert len(hats) == 3
+        assert hats[0].color == "red"
+        assert hats[1].color == "blue"
+        assert hats[2].color == "green"
+
+    await transport.close()
+
+
+@pytest.mark.asyncio
+async def test_grpc_async_stream_unary(grpc_server):
+    """Test async stream-unary RPC with gRPC transport."""
+    transport = GrpcTransportAsync(f"localhost:{grpc_server}")
+
+    method = MethodInfo(
+        name="MakeHatFromSizes",
+        service_name="i2y.connecpy.example.Haberdasher",
+        input=haberdasher_pb2.Size,
+        output=haberdasher_pb2.Hat,
+        idempotency_level=IdempotencyLevel.UNKNOWN,
+    )
+
+    # Mock the async stub
+    with patch.object(transport, "_get_or_create_stub") as mock_stub:
+
+        async def mock_stream_unary_call(request_stream, metadata=None, timeout=None):  # noqa: ARG001
+            total_size = 0
+            count = 0
+            async for size_request in request_stream:
+                total_size += size_request.inches
+                count += 1
+            avg_size = total_size // count if count > 0 else 0
+            return haberdasher_pb2.Hat(
+                size=avg_size, color="averaged", name=f"avg-{avg_size}-hat"
+            )
+
+        mock_stub.return_value = mock_stream_unary_call
+
+        # Create an async stream of sizes
+        async def size_stream():
+            for inches in [10, 12, 14]:
+                yield haberdasher_pb2.Size(inches=inches, description=f"Size {inches}")
+
+        response = await transport.stream_unary(method, size_stream())
+
+        assert response.size == 12  # Average of 10, 12, 14
+        assert response.color == "averaged"
+
+    await transport.close()
+
+
+@pytest.mark.asyncio
+async def test_grpc_async_stream_stream(grpc_server):
+    """Test async stream-stream RPC with gRPC transport."""
+    transport = GrpcTransportAsync(f"localhost:{grpc_server}")
+
+    method = MethodInfo(
+        name="StreamToStream",
+        service_name="i2y.connecpy.example.Haberdasher",
+        input=haberdasher_pb2.Size,
+        output=haberdasher_pb2.Hat,
+        idempotency_level=IdempotencyLevel.UNKNOWN,
+    )
+
+    # Mock the async stub
+    with patch.object(transport, "_get_or_create_stub") as mock_stub:
+
+        async def mock_stream_stream_gen(request_stream, metadata=None, timeout=None):  # noqa: ARG001
+            async for size_request in request_stream:
+                yield haberdasher_pb2.Hat(
+                    size=size_request.inches,
+                    color=f"color-{size_request.inches}",
+                    name=f"hat-{size_request.inches}",
+                )
+
+        def mock_stream_stream_call(request_stream, metadata=None, timeout=None):
+            return mock_stream_stream_gen(request_stream, metadata, timeout)
+
+        mock_stub.return_value = mock_stream_stream_call
+
+        # Create an async stream of sizes
+        async def size_stream():
+            for inches in [8, 10, 12]:
+                yield haberdasher_pb2.Size(inches=inches, description=f"Size {inches}")
+
+        response_stream = transport.stream_stream(method, size_stream())
+
+        # Consume the async response stream
+        hats = []
+        async for hat in response_stream:
+            hats.append(hat)
+
+        assert len(hats) == 3
+        assert hats[0].size == 8
+        assert hats[1].size == 10
+        assert hats[2].size == 12
+
+    await transport.close()
diff --git a/test/transport_api/test_transport.py b/test/transport_api/test_transport.py
new file mode 100644
index 0000000..fd9f1ad
--- /dev/null
+++ b/test/transport_api/test_transport.py
@@ -0,0 +1,105 @@
+"""Tests for the Transport API."""
+
+import pytest
+
+from connecpy.code import Code
+from connecpy.exceptions import ConnecpyException
+from connecpy.transport.client import CallOptions, ConnectTransport, RetryPolicy
+
+
+def test_call_options_creation():
+    """Test creating CallOptions."""
+    options = CallOptions(timeout_ms=5000, headers={"x-custom": "value"})
+    assert options.timeout_ms == 5000
+    assert options.headers["x-custom"] == "value"
+
+
+def test_retry_policy_defaults():
+    """Test RetryPolicy default values."""
+    policy = RetryPolicy()
+    assert policy.max_attempts == 3
+    assert policy.initial_backoff_ms == 100
+    assert policy.max_backoff_ms == 5000
+    assert policy.backoff_multiplier == 2.0
+    assert policy.retryable_codes is not None
+    assert Code.UNAVAILABLE in policy.retryable_codes
+    assert Code.DEADLINE_EXCEEDED in policy.retryable_codes
+
+
+def test_retry_policy_custom():
+    """Test custom RetryPolicy."""
+    policy = RetryPolicy(
+        max_attempts=5, initial_backoff_ms=200, retryable_codes=[Code.INTERNAL]
+    )
+    assert policy.max_attempts == 5
+    assert policy.initial_backoff_ms == 200
+    assert policy.retryable_codes == [Code.INTERNAL]
+
+
+def test_call_options():
+    """Test CallOptions."""
+    options = CallOptions(timeout_ms=3000, headers={"x-request-id": "123"})
+    assert options.timeout_ms == 3000
+    assert options.headers["x-request-id"] == "123"
+
+
+def test_create_connect_transport():
+    """Test creating a Connect transport."""
+    transport = ConnectTransport("https://example.com")
+    assert isinstance(transport, ConnectTransport)
+    assert transport.address == "https://example.com"
+
+    # With timeout
+    transport = ConnectTransport("https://example.com", timeout_ms=10000)
+    assert transport.timeout_ms == 10000
+
+
+def test_connect_transport_merge_options():
+    """Test merging call options with transport defaults."""
+    transport = ConnectTransport("https://example.com", timeout_ms=5000)
+
+    # No call options - should use transport defaults
+    merged = transport._merge_options(None)
+    assert merged.timeout_ms == 5000
+
+    # Call options override
+    call_options = CallOptions(timeout_ms=3000, headers={"x-custom": "override"})
+    merged = transport._merge_options(call_options)
+    assert merged.timeout_ms == 3000
+    assert merged.headers["x-custom"] == "override"
+
+
+def test_connect_transport_retry_logic():
+    """Test retry logic in ConnectTransport."""
+    transport = ConnectTransport("https://example.com")
+    policy = RetryPolicy(max_attempts=3, initial_backoff_ms=10)
+
+    # Test successful retry
+    attempt_count = 0
+
+    def failing_func():
+        nonlocal attempt_count
+        attempt_count += 1
+        if attempt_count < 3:
+            raise ConnecpyException(Code.UNAVAILABLE, "Service unavailable")
+        return "success"
+
+    result = transport._execute_with_retry(failing_func, policy)
+    assert result == "success"
+    assert attempt_count == 3
+
+    # Test non-retryable error
+    def non_retryable_func():
+        raise ConnecpyException(Code.INVALID_ARGUMENT, "Bad request")
+
+    with pytest.raises(ConnecpyException) as exc_info:
+        transport._execute_with_retry(non_retryable_func, policy)
+    assert exc_info.value.code == Code.INVALID_ARGUMENT
+
+    # Test max attempts exceeded
+    def always_failing_func():
+        raise ConnecpyException(Code.UNAVAILABLE, "Always fails")
+
+    with pytest.raises(ConnecpyException) as exc_info:
+        transport._execute_with_retry(always_failing_func, policy)
+    assert exc_info.value.code == Code.UNAVAILABLE
diff --git a/test/transport_api/test_transport_api.py b/test/transport_api/test_transport_api.py
new file mode 100644
index 0000000..66dbfb6
--- /dev/null
+++ b/test/transport_api/test_transport_api.py
@@ -0,0 +1,256 @@
+"""Tests for the Transport API and create_client functionality."""
+
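+# These tests exercise the transports purely against mocks; no server or
+# network connection is involved.
+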
+import unittest
+from unittest.mock import MagicMock, Mock, patch
+
+from connecpy.method import IdempotencyLevel, MethodInfo
+from connecpy.transport.client import CallOptions, ConnectTransport, GrpcTransport
+from connecpy.transport.client.client import create_client_sync
+
+
+class TestConnectTransport(unittest.TestCase):
+    """Test the ConnectTransport class."""
+
+    def test_connect_transport_init(self):
+        """Test ConnectTransport initialization with all parameters."""
+        transport = ConnectTransport(
+            "http://localhost:3000",
+            proto_json=True,
+            accept_compression=["gzip"],
+            send_compression="gzip",
+            timeout_ms=5000,
+            read_max_bytes=1000000,
+            interceptors=[],
+            session=None,
+        )
+
+        assert transport.address == "http://localhost:3000"
+        assert transport.proto_json is True
+        assert transport.accept_compression == ["gzip"]
+        assert transport.send_compression == "gzip"
+        assert transport.timeout_ms == 5000
+        assert transport.read_max_bytes == 1000000
+
+    @patch("connecpy.transport.client.connect.ConnecpyClientSync")
+    def test_connect_transport_unary_call(self, mock_client_class):
+        """Test ConnectTransport unary_unary method."""
+        # Setup mock client
+        mock_client = MagicMock()
+        mock_client_class.return_value = mock_client
+        mock_client.execute_unary.return_value = {"result": "test"}
+
+        transport = ConnectTransport("http://localhost:3000")
+
+        # Create test method info
+        method = MethodInfo(
+            name="TestMethod",
+            service_name="TestService",
+            input=type("TestInput", (), {}),
+            output=type("TestOutput", (), {}),
+            idempotency_level=IdempotencyLevel.UNKNOWN,
+        )
+
+        # Test unary call
+        request = {"test": "data"}
+        call_options = CallOptions(headers={"x-test": "header"}, timeout_ms=1000)
+
+        result = transport.unary_unary(method, request, call_options)
+
+        # Verify the client was called correctly
+        mock_client.execute_unary.assert_called_once()
+        assert result == {"result": "test"}
+
+    def test_connect_transport_close(self):
+        """Test ConnectTransport close method."""
+        with patch(
+            "connecpy.transport.client.connect.ConnecpyClientSync"
+        ) as mock_client_class:
+            mock_client = MagicMock()
+            mock_client_class.return_value = mock_client
+
+            transport = ConnectTransport("http://localhost:3000")
+            transport.close()
+
+            mock_client.close.assert_called_once()
+
+
+class TestGrpcTransport(unittest.TestCase):
+    """Test the GrpcTransport class."""
+
+    @patch("connecpy.transport.client.grpc.GRPC_AVAILABLE", True)
+    @patch("connecpy.transport.client.grpc.grpc")
+    def test_grpc_transport_init_insecure(self, mock_grpc):
+        """Test GrpcTransport initialization with insecure channel."""
+        mock_grpc.insecure_channel.return_value = MagicMock()
+
+        transport = GrpcTransport("localhost:50051")
+
+        mock_grpc.insecure_channel.assert_called_once_with(
+            "localhost:50051", options=[]
+        )
+        assert transport._target == "localhost:50051"
+
+    @patch("connecpy.transport.client.grpc.GRPC_AVAILABLE", True)
+    @patch("connecpy.transport.client.grpc.grpc")
+    def test_grpc_transport_init_secure(self, mock_grpc):
+        """Test GrpcTransport initialization with secure channel."""
+        mock_grpc.secure_channel.return_value = MagicMock()
+        mock_credentials = MagicMock()
+
+        _ = GrpcTransport(
+            "api.example.com:443",
+            credentials=mock_credentials,
+            options=[("grpc.max_receive_message_length", 10000000)],
+        )
+
+        mock_grpc.secure_channel.assert_called_once_with(
+            "api.example.com:443",
+            mock_credentials,
+            options=[("grpc.max_receive_message_length", 10000000)],
+        )
+
+    @patch("connecpy.transport.client.grpc.GRPC_AVAILABLE", True)
+    @patch("connecpy.transport.client.grpc.grpc")
+    def test_grpc_transport_with_compression(self, mock_grpc):
+        """Test GrpcTransport with compression."""
+        mock_grpc.insecure_channel.return_value = MagicMock()
+
+        _ = GrpcTransport("localhost:50051", compression="gzip")
+
+        # Check that compression was added to options
+        call_args = mock_grpc.insecure_channel.call_args
+        options = call_args[1]["options"]
+        # The transport converts "gzip" to the numeric value 2
+        assert ("grpc.default_compression_algorithm", 2) in options
+
+    @patch("connecpy.transport.client.grpc.GRPC_AVAILABLE", True)
+    @patch("connecpy.transport.client.grpc.grpc")
+    def test_grpc_transport_unary_call(self, mock_grpc):
+        """Test GrpcTransport unary_unary method."""
+        mock_channel = MagicMock()
+        mock_grpc.insecure_channel.return_value = mock_channel
+
+        # Setup mock stub
+        mock_stub = MagicMock()
+        mock_stub.return_value = {"result": "test"}
+        mock_channel.unary_unary.return_value = mock_stub
+
+        transport = GrpcTransport("localhost:50051")
+
+        # Create test method info with type mocks
+        input_type = type("TestInput", (), {"SerializeToString": lambda _: b"test"})
+        output_type = type(
+            "TestOutput", (), {"FromString": classmethod(lambda _, x: {"parsed": x})}
+        )
+        method = MethodInfo(
+            name="TestMethod",
+            service_name="TestService",
+            input=input_type,
+            output=output_type,
+            idempotency_level=IdempotencyLevel.UNKNOWN,
+        )
+
+        # Test unary call
+        request = Mock(SerializeToString=lambda: b"test_request")
+        call_options = CallOptions(headers={"x-test": "header"}, timeout_ms=1000)
+
+        result = transport.unary_unary(method, request, call_options)
+
+        # Verify stub was called
+        mock_stub.assert_called_once_with(
+            request, metadata=[("x-test", "header")], timeout=1.0
+        )
+        assert result == {"result": "test"}
+
+
+class TestCreateClientSync(unittest.TestCase):
+    """Test the create_client_sync function."""
+
+    @patch("importlib.import_module")
+    def test_create_client_with_connect_transport(self, mock_import):
+        """Test creating a client with ConnectTransport."""
+        # Setup mock module with client class
+        mock_module = MagicMock()
+        mock_client_class = MagicMock()
+        mock_module.TestServiceClientSync = mock_client_class
+        mock_import.return_value = mock_module
+
+        # Create mock service class
+        service_class = type(
+            "TestService", (), {"__module__": "test.module", "__name__": "TestService"}
+        )
+
+        # Create transport
+        transport = ConnectTransport("http://localhost:3000")
+
+        # Create client
+        client = create_client_sync(service_class, transport)  # noqa: F841
+
+        # Verify client was created with correct parameters
+        mock_client_class.assert_called_once_with(
+            address="http://localhost:3000",
+            proto_json=False,
+            accept_compression=None,
+            send_compression=None,
+            timeout_ms=None,
+            read_max_bytes=None,
+            interceptors=(),
+            session=None,
+        )
+
+    @patch("importlib.import_module")
+    @patch("connecpy.transport.client.grpc.GRPC_AVAILABLE", True)
+    @patch("connecpy.transport.client.grpc.grpc")
+    def test_create_client_with_grpc_transport(self, mock_grpc, mock_import):
+        """Test creating a client with GrpcTransport."""
+        # Setup mock channel
+        mock_channel = MagicMock()
+        mock_grpc.insecure_channel.return_value = mock_channel
+
+        # Setup mock service module with wrapper class
+        mock_service_module = MagicMock()
+        mock_wrapper_class = MagicMock()
+        mock_wrapper_instance = MagicMock()
+        mock_wrapper_class.return_value = mock_wrapper_instance
+        mock_service_module.TestServiceGrpcWrapperSync = mock_wrapper_class
+
+        # Setup mock grpc module with stub class
+        mock_grpc_module = MagicMock()
+        mock_stub_class = MagicMock()
+        mock_stub_instance = MagicMock()
+        mock_stub_class.return_value = mock_stub_instance
+        mock_grpc_module.TestServiceStub = mock_stub_class
+
+        # Mock the import to return appropriate modules
+        def import_side_effect(name):
+            if name.endswith("_pb2_grpc"):
+                return mock_grpc_module
+            return mock_service_module
+
+        mock_import.side_effect = import_side_effect
+
+        # Create mock service class
+        service_class = type(
+            "TestService",
+            (),
+            {"__module__": "test.module.service_connecpy", "__name__": "TestService"},
+        )
+
+        # Create transport
+        transport = GrpcTransport("localhost:50051")
+
+        # Create client
+        client = create_client_sync(service_class, transport)
+
+        # Verify stub was created with the channel
+        mock_stub_class.assert_called_once_with(mock_channel)
+
+        # Verify wrapper was created with the stub
+        mock_wrapper_class.assert_called_once_with(mock_stub_instance)
+
+        # Verify we got the wrapper instance
+        assert client == mock_wrapper_instance
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/test/transport_api/test_transport_retry_async.py b/test/transport_api/test_transport_retry_async.py
new file mode 100644
index 0000000..89a0c73
--- /dev/null
+++ b/test/transport_api/test_transport_retry_async.py
@@ -0,0 +1,276 @@
+"""Tests for async retry policies in the Transport API."""
+
+from unittest.mock import AsyncMock, patch
+
+import pytest
+
+from connecpy.code import Code
+from connecpy.exceptions import ConnecpyException
+from connecpy.method import IdempotencyLevel, MethodInfo
+from connecpy.transport.client import CallOptions, ConnectTransportAsync, RetryPolicy
+
+
+@pytest.mark.asyncio
+async def test_async_retry_policy_defaults():
+    """Test RetryPolicy with default values in async transport."""
+    policy = RetryPolicy()
+    assert policy.max_attempts == 3
+    assert policy.initial_backoff_ms == 100
+    assert policy.max_backoff_ms == 5000
+    assert policy.backoff_multiplier == 2.0
+    assert policy.retryable_codes == [Code.UNAVAILABLE, Code.DEADLINE_EXCEEDED]
+
+
+@pytest.mark.asyncio
+async def test_async_retry_success_after_failures():
+    """Test async retry succeeds after transient failures."""
+    transport = ConnectTransportAsync("https://example.com")
+    policy = RetryPolicy(max_attempts=3, initial_backoff_ms=10)
+
+    attempt_count = 0
+
+    async def failing_func():
+        nonlocal attempt_count
+        attempt_count += 1
+        if attempt_count < 3:
+            raise ConnecpyException(Code.UNAVAILABLE, "Service unavailable")
+        return "success"
+
+    # Test the retry mechanism
+    result = await transport._execute_with_retry(failing_func, policy)
+    assert result == "success"
+    assert attempt_count == 3
+
+
+@pytest.mark.asyncio
+async def test_async_retry_non_retryable_error():
+    """Test async retry fails immediately on non-retryable errors."""
+    transport = ConnectTransportAsync("https://example.com")
+    policy = RetryPolicy(max_attempts=3, initial_backoff_ms=10)
+
+    attempt_count = 0
+
+    async def non_retryable_func():
+        nonlocal attempt_count
+        attempt_count += 1
+        raise ConnecpyException(Code.INVALID_ARGUMENT, "Bad request")
+
+    with pytest.raises(ConnecpyException) as exc_info:
+        await transport._execute_with_retry(non_retryable_func, policy)
+
+    assert exc_info.value.code == Code.INVALID_ARGUMENT
+    assert attempt_count == 1  # Should not retry
+
+
+@pytest.mark.asyncio
+async def test_async_retry_max_attempts_exceeded():
+    """Test async retry fails when max attempts are exceeded."""
+    transport = ConnectTransportAsync("https://example.com")
+    policy = RetryPolicy(max_attempts=2, initial_backoff_ms=10)
+
+    attempt_count = 0
+
+    async def always_failing_func():
+        nonlocal attempt_count
+        attempt_count += 1
+        raise ConnecpyException(Code.UNAVAILABLE, f"Attempt {attempt_count}")
+
+    with pytest.raises(ConnecpyException) as exc_info:
+        await transport._execute_with_retry(always_failing_func, policy)
+
+    assert exc_info.value.code == Code.UNAVAILABLE
+    assert attempt_count == 2  # Should have tried max_attempts times
+
+
+@pytest.mark.asyncio
+async def test_async_retry_exponential_backoff():
+    """Test async retry uses exponential backoff correctly."""
+    transport = ConnectTransportAsync("https://example.com")
+    policy = RetryPolicy(
+        max_attempts=4,
+        initial_backoff_ms=100,
+        max_backoff_ms=1000,
+        backoff_multiplier=2.0,
+    )
+
+    backoff_times = []
+    attempt_count = 0
+
+    async def track_backoff_func():
+        nonlocal attempt_count
+        attempt_count += 1
+        if attempt_count < 4:
+            raise ConnecpyException(Code.UNAVAILABLE, "Service unavailable")
+        return "success"
+
+    # Patch asyncio.sleep to track backoff times
+    with patch("asyncio.sleep", new_callable=AsyncMock) as mock_sleep:
+
+        async def track_sleep(seconds):
+            backoff_times.append(seconds * 1000)  # Convert to ms
+
+        mock_sleep.side_effect = track_sleep
+
+        result = await transport._execute_with_retry(track_backoff_func, policy)
+
+        assert result == "success"
+        assert len(backoff_times) == 3  # 3 retries before success
+
+        # Check exponential backoff
+        assert backoff_times[0] == 100  # initial_backoff_ms
+        assert backoff_times[1] == 200  # 100 * 2
+        assert backoff_times[2] == 400  # 200 * 2
+
+
+@pytest.mark.asyncio
+async def test_async_retry_backoff_max_limit():
+    """Test async retry respects max backoff limit."""
+    transport = ConnectTransportAsync("https://example.com")
+    policy = RetryPolicy(
+        max_attempts=5,
+        initial_backoff_ms=100,
+        max_backoff_ms=300,  # Low max to test capping
+        backoff_multiplier=3.0,
+    )
+
+    backoff_times = []
+
+    async def always_failing():
+        raise ConnecpyException(Code.UNAVAILABLE, "Service unavailable")
+
+    with patch("asyncio.sleep", new_callable=AsyncMock) as mock_sleep:
+
+        async def track_sleep(seconds):
+            backoff_times.append(seconds * 1000)  # Convert to ms
+
+        mock_sleep.side_effect = track_sleep
+
+        with pytest.raises(ConnecpyException):
+            await transport._execute_with_retry(always_failing, policy)
+
+        assert len(backoff_times) == 4  # 4 retries before giving up
+        assert backoff_times[0] == 100  # initial_backoff_ms
+        assert backoff_times[1] == 300  # 100 * 3, but capped at max_backoff_ms
+        assert backoff_times[2] == 300  # Capped at max_backoff_ms
+        assert backoff_times[3] == 300  # Capped at max_backoff_ms
+
+
+@pytest.mark.asyncio
+async def test_async_retry_custom_retryable_codes():
+    """Test async retry with custom retryable error codes."""
+    transport = ConnectTransportAsync("https://example.com")
+    policy = RetryPolicy(
+        max_attempts=3,
+        initial_backoff_ms=10,
+        retryable_codes=[Code.INTERNAL, Code.UNKNOWN],  # Custom codes
+    )
+
+    # Test retryable error
+    attempt_count = 0
+
+    async def internal_error_func():
+        nonlocal attempt_count
+        attempt_count += 1
+        if attempt_count < 2:
+            raise ConnecpyException(Code.INTERNAL, "Internal error")
+        return "recovered"
+
+    result = await transport._execute_with_retry(internal_error_func, policy)
+    assert result == "recovered"
+    assert attempt_count == 2
+
+    # Test non-retryable error (UNAVAILABLE not in custom list)
+    async def unavailable_func():
+        raise ConnecpyException(Code.UNAVAILABLE, "Service unavailable")
+
+    with pytest.raises(ConnecpyException) as exc_info:
+        await transport._execute_with_retry(unavailable_func, policy)
+
+    assert exc_info.value.code == Code.UNAVAILABLE
+
+
+@pytest.mark.asyncio
+async def test_async_unary_with_retry_via_call_options():
+    """Test async unary RPC with retry policy via CallOptions."""
+    transport = ConnectTransportAsync("https://example.com")
+
+    method = MethodInfo(
+        name="TestMethod",
+        service_name="TestService",
+        input=type("TestInput", (), {}),
+        output=type("TestOutput", (), {}),
+        idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+    )
+
+    # Mock the underlying client
+    with patch.object(transport, "_client") as mock_client:
+        attempt_count = 0
+
+        async def failing_execute(*args, **kwargs):  # noqa: ARG001
+            nonlocal attempt_count
+            attempt_count += 1
+            if attempt_count < 3:
+                raise ConnecpyException(Code.UNAVAILABLE, "Temporary failure")
+            return {"result": "success"}
+
+        mock_client.execute_unary = AsyncMock(side_effect=failing_execute)
+
+        # Call with retry policy
+        call_options = CallOptions(
+            retry_policy=RetryPolicy(max_attempts=3, initial_backoff_ms=10)
+        )
+
+        with patch("asyncio.sleep", new_callable=AsyncMock):  # Skip actual sleep
+            result = await transport.unary_unary(method, {"test": "data"}, call_options)
+
+        assert result == {"result": "success"}
+        assert attempt_count == 3
+
+
+@pytest.mark.asyncio
+async def test_async_stream_unary_with_retry():
+    """Test async stream-unary RPC with retry policy."""
+    transport = ConnectTransportAsync("https://example.com")
+
+    method = MethodInfo(
+        name="StreamMethod",
+        service_name="TestService",
+        input=type("TestInput", (), {}),
+        output=type("TestOutput", (), {}),
+        idempotency_level=IdempotencyLevel.UNKNOWN,
+    )
+
+    # Mock the underlying client
+    with patch.object(transport, "_client") as mock_client:
+        attempt_count = 0
+
+        async def failing_stream_execute(*args, **kwargs):  # noqa: ARG001
+            nonlocal attempt_count
+            attempt_count += 1
+            if attempt_count < 2:
+                raise ConnecpyException(Code.DEADLINE_EXCEEDED, "Timeout")
+            return {"aggregated": "result"}
+
+        mock_client.execute_client_stream = AsyncMock(
+            side_effect=failing_stream_execute
+        )
+
+        # Create an async stream
+        async def input_stream():
+            for i in range(3):
+                yield {"data": i}
+
+        # Call with retry policy
+        call_options = CallOptions(
+            retry_policy=RetryPolicy(
+                max_attempts=2,
+                initial_backoff_ms=10,
+                retryable_codes=[Code.DEADLINE_EXCEEDED],
+            )
+        )
+
+        with patch("asyncio.sleep", new_callable=AsyncMock):  # Skip actual sleep
+            result = await transport.stream_unary(method, input_stream(), call_options)
+
+        assert result == {"aggregated": "result"}
+        assert attempt_count == 2
diff --git a/test/transport_api/test_transport_streaming.py b/test/transport_api/test_transport_streaming.py
new file mode 100644
index 0000000..5315903
--- /dev/null
+++ b/test/transport_api/test_transport_streaming.py
@@ -0,0 +1,230 @@
+"""Tests for streaming RPCs with the Transport API."""
+
+import unittest
+from unittest.mock import AsyncMock, MagicMock, patch
+
+from connecpy.method import IdempotencyLevel, MethodInfo
+from connecpy.transport.client import ConnectTransport, ConnectTransportAsync
+
+
+class TestConnectTransportStreaming(unittest.TestCase):
+    """Test Connect transport with streaming RPCs."""
+
+    @patch("connecpy.transport.client.connect.ConnecpyClientSync")
+    def test_unary_stream(self, mock_client_class):
+        """Test unary-stream RPC."""
+        # Setup mock client
+        mock_client = MagicMock()
+        mock_client_class.return_value = mock_client
+
+        # Mock the execute_server_stream method to return an iterator
+        mock_stream = iter([{"result": 1}, {"result": 2}, {"result": 3}])
+        mock_client.execute_server_stream.return_value = mock_stream
+
+        transport = ConnectTransport("http://localhost:3000")
+
+        # Create test method info
+        method = MethodInfo(
+            name="StreamMethod",
+            service_name="TestService",
+            input=type("TestInput", (), {}),
+            output=type("TestOutput", (), {}),
+            idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+        )
+
+        # Test unary-stream call
+        request = {"test": "data"}
+        result_stream = transport.unary_stream(method, request)
+
+        # Consume the stream
+        results = list(result_stream)
+
+        # Verify the client was called correctly
+        mock_client.execute_server_stream.assert_called_once()
+        assert len(results) == 3
+        assert results[0] == {"result": 1}
+        assert results[1] == {"result": 2}
+        assert results[2] == {"result": 3}
+
+    @patch("connecpy.transport.client.connect.ConnecpyClientSync")
+    def test_stream_unary(self, mock_client_class):
+        """Test stream-unary RPC."""
+        # Setup mock client
+        mock_client = MagicMock()
+        mock_client_class.return_value = mock_client
+        mock_client.execute_client_stream.return_value = {"combined": "result"}
+
+        transport = ConnectTransport("http://localhost:3000")
+
+        # Create test method info
+        method = MethodInfo(
+            name="ClientStreamMethod",
+            service_name="TestService",
+            input=type("TestInput", (), {}),
+            output=type("TestOutput", (), {}),
+            idempotency_level=IdempotencyLevel.UNKNOWN,
+        )
+
+        # Test stream-unary call
+        request_stream = iter([{"data": 1}, {"data": 2}, {"data": 3}])
+        result = transport.stream_unary(method, request_stream)
+
+        # Verify the client was called correctly
+        mock_client.execute_client_stream.assert_called_once()
+        assert result == {"combined": "result"}
+
+    @patch("connecpy.transport.client.connect.ConnecpyClientSync")
+    def test_stream_stream(self, mock_client_class):
+        """Test stream-stream RPC."""
+        # Setup mock client
+        mock_client = MagicMock()
+        mock_client_class.return_value = mock_client
+
+        # Mock the execute_bidi_stream method to return an iterator
+        mock_response_stream = iter([{"response": 1}, {"response": 2}])
+        mock_client.execute_bidi_stream.return_value = mock_response_stream
+
+        transport = ConnectTransport("http://localhost:3000")
+
+        # Create test method info
+        method = MethodInfo(
+            name="BidiStreamMethod",
+            service_name="TestService",
+            input=type("TestInput", (), {}),
+            output=type("TestOutput", (), {}),
+            idempotency_level=IdempotencyLevel.UNKNOWN,
+        )
+
+        # Test stream-stream call
+        request_stream = iter([{"request": 1}, {"request": 2}])
+        result_stream = transport.stream_stream(method, request_stream)
+
+        # Consume the stream
+        results = list(result_stream)
+
+        # Verify the client was called correctly
+        mock_client.execute_bidi_stream.assert_called_once()
+        assert len(results) == 2
+        assert results[0] == {"response": 1}
+        assert results[1] == {"response": 2}
+
+
+class TestConnectTransportAsyncStreaming(unittest.IsolatedAsyncioTestCase):
+    """Test async Connect transport with streaming RPCs."""
+
+    @patch("connecpy.transport.client.connect_async.ConnecpyClient")
+    async def test_unary_stream_async(self, mock_client_class):
+        """Test async unary-stream RPC."""
+        # Setup mock client
+        mock_client = MagicMock()  # Use MagicMock instead of AsyncMock
+        mock_client_class.return_value = mock_client
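+        # (Calling an AsyncMock returns a coroutine, while the transport
+        # iterates the return value of execute_server_stream directly.)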
+
+        # Create an async generator for the response stream
+        async def mock_stream():
+            for i in range(3):
+                yield {"result": i + 1}
+
+        mock_client.execute_server_stream.return_value = mock_stream()
+
+        transport = ConnectTransportAsync("http://localhost:3000")
+
+        # Create test method info
+        method = MethodInfo(
+            name="StreamMethod",
+            service_name="TestService",
+            input=type("TestInput", (), {}),
+            output=type("TestOutput", (), {}),
+            idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+        )
+
+        # Test unary-stream call
+        request = {"test": "data"}
+        result_stream = transport.unary_stream(method, request)
+
+        # Consume the stream
+        results = []
+        async for result in result_stream:
+            results.append(result)
+
+        # Verify the client was called correctly
+        mock_client.execute_server_stream.assert_called_once()
+        assert len(results) == 3
+        assert results[0] == {"result": 1}
+
+    @patch("connecpy.transport.client.connect_async.ConnecpyClient")
+    async def test_stream_unary_async(self, mock_client_class):
+        """Test async stream-unary RPC."""
+        # Setup mock client
+        mock_client = (
+            AsyncMock()
+        )  # Keep AsyncMock for stream_unary since it returns a single value
+        mock_client_class.return_value = mock_client
+        mock_client.execute_client_stream.return_value = {"combined": "result"}
+
+        transport = ConnectTransportAsync("http://localhost:3000")
+
+        # Create test method info
+        method = MethodInfo(
+            name="ClientStreamMethod",
+            service_name="TestService",
+            input=type("TestInput", (), {}),
+            output=type("TestOutput", (), {}),
+            idempotency_level=IdempotencyLevel.UNKNOWN,
+        )
+
+        # Test stream-unary call
+        async def request_stream():
+            for i in range(3):
+                yield {"data": i + 1}
+
+        result = await transport.stream_unary(method, request_stream())
+
+        # Verify the client was called correctly
+        mock_client.execute_client_stream.assert_called_once()
+        assert result == {"combined": "result"}
+
+    @patch("connecpy.transport.client.connect_async.ConnecpyClient")
+    async def test_stream_stream_async(self, mock_client_class):
+        """Test async stream-stream RPC."""
+        # Setup mock client
+        mock_client = MagicMock()  # Use MagicMock instead of AsyncMock
+        mock_client_class.return_value = mock_client
+
+        # Create an async generator for the response stream
+        async def mock_response_stream():
+            for i in range(2):
+                yield {"response": i + 1}
+
+        mock_client.execute_bidi_stream.return_value = mock_response_stream()
+
+        transport = ConnectTransportAsync("http://localhost:3000")
+
+        # Create test method info
+        method = MethodInfo(
+            name="BidiStreamMethod",
+            service_name="TestService",
+            input=type("TestInput", (), {}),
+            output=type("TestOutput", (), {}),
+            idempotency_level=IdempotencyLevel.UNKNOWN,
+        )
+
+        # Test stream-stream call
+        async def request_stream():
+            for i in range(2):
+                yield {"request": i + 1}
+
+        result_stream = transport.stream_stream(method, request_stream())
+
+        # Consume the stream
+        results = []
+        async for result in result_stream:
+            results.append(result)
+
+        # Verify the client was called correctly
+        mock_client.execute_bidi_stream.assert_called_once()
+        assert len(results) == 2
+        assert results[0] == {"response": 1}
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/test/transport_api/test_transport_timeout.py b/test/transport_api/test_transport_timeout.py
new file mode 100644
index 0000000..8c15e3c
--- /dev/null
+++ b/test/transport_api/test_transport_timeout.py
@@ -0,0 +1,227 @@
+"""Tests for timeout validation in the Transport API."""
+
+import pytest
+
+from connecpy.method import IdempotencyLevel, MethodInfo
+from connecpy.transport.client import (
+    CallOptions,
+    ConnectTransport,
+    ConnectTransportAsync,
+    GrpcTransport,
+    GrpcTransportAsync,
+)
+
+
+def test_connect_timeout_validation():
+    """Test timeout validation for Connect transport."""
+    transport = ConnectTransport("http://localhost:3000")
+
+    method = MethodInfo(
+        name="TestMethod",
+        service_name="TestService",
+        input=type("TestInput", (), {}),
+        output=type("TestOutput", (), {}),
+        idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+    )
+
+    # Test valid timeout values
+    # 100 days in milliseconds = 8,640,000,000 ms
+    valid_timeouts = [
+        1000,  # 1 second
+        60000,  # 1 minute
+        3600000,  # 1 hour
+        86400000,  # 1 day
+        8640000000,  # 100 days (max allowed)
+    ]
+
+    for timeout_ms in valid_timeouts:
+        call_options = CallOptions(timeout_ms=timeout_ms)
+        # Should not raise any exception
+        try:
+            # We need to mock the actual call to avoid network errors
+            from unittest.mock import patch  # noqa: PLC0415
+
+            with patch.object(transport._client, "execute_unary") as mock:
+                mock.return_value = {"result": "ok"}
+                transport.unary_unary(method, {"test": "data"}, call_options)
+        except ValueError as e:
+            if "Timeout" in str(e):
+                pytest.fail(
+                    f"Timeout {timeout_ms}ms should be valid but got error: {e}"
+                )
+
+    # Test invalid timeout values
+    invalid_timeouts = [
+        0,  # Zero timeout
+        -1000,  # Negative timeout
+        8640000001,  # Just over 100 days
+        10000000000,  # Way over 100 days
+    ]
+
+    for timeout_ms in invalid_timeouts:
+        call_options = CallOptions(timeout_ms=timeout_ms)
+        with pytest.raises(ValueError, match="Timeout"):
+            # Mock to isolate timeout validation
+            from unittest.mock import patch  # noqa: PLC0415
+
+            with patch.object(transport._client, "execute_unary") as mock:
+                mock.return_value = {"result": "ok"}
+                transport.unary_unary(method, {"test": "data"}, call_options)
+
+
+@pytest.mark.asyncio
+async def test_connect_async_timeout_validation():
+    """Test timeout validation for async Connect transport."""
+    transport = ConnectTransportAsync("http://localhost:3000")
+
+    method = MethodInfo(
+        name="TestMethod",
+        service_name="TestService",
+        input=type("TestInput", (), {}),
+        output=type("TestOutput", (), {}),
+        idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+    )
+
+    # Test that 100 days is allowed
+    call_options = CallOptions(timeout_ms=8640000000)  # 100 days
+
+    from unittest.mock import AsyncMock, patch  # noqa: PLC0415
+
+    with patch.object(
+        transport._client, "execute_unary", new_callable=AsyncMock
+    ) as mock:
+        mock.return_value = {"result": "ok"}
+        result = await transport.unary_unary(method, {"test": "data"}, call_options)
+        assert result == {"result": "ok"}
+
+    # Test that just over 100 days is rejected
+    call_options = CallOptions(timeout_ms=8640000001)
+
+    with pytest.raises(ValueError, match="max is 100 days"):  # noqa: SIM117
+        with patch.object(
+            transport._client, "execute_unary", new_callable=AsyncMock
+        ) as mock:
+            mock.return_value = {"result": "ok"}
+            await transport.unary_unary(method, {"test": "data"}, call_options)
+
+
+def test_grpc_timeout_validation():
+    """Test timeout validation for gRPC transport."""
+    # Skip if grpc not available
+    pytest.importorskip("grpc")
+
+    transport = GrpcTransport("localhost:50051")
+
+    method = MethodInfo(
+        name="TestMethod",
+        service_name="TestService",
+        input=type("TestInput", (), {}),
+        output=type("TestOutput", (), {}),
+        idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+    )
+
+    # Test 100 days timeout
+    call_options = CallOptions(timeout_ms=8640000000)
+
+    from unittest.mock import MagicMock, patch  # noqa: PLC0415
+
+    with patch.object(transport, "_get_or_create_stub") as mock_stub:
+        mock_call = MagicMock(return_value={"result": "ok"})
+        mock_stub.return_value = mock_call
+
+        # Should not raise ValueError for timeout
+        result = transport.unary_unary(method, {"test": "data"}, call_options)
+        assert result == {"result": "ok"}
+
+        # Check that the timeout was converted correctly (ms to seconds)
+        mock_call.assert_called_once()
+        _, kwargs = mock_call.call_args
+        assert kwargs["timeout"] == 8640000.0  # 100 days in seconds
+
+    # Test invalid timeout (over 100 days)
+    call_options = CallOptions(timeout_ms=8640000001)
+
+    with pytest.raises(ValueError, match="max is 100 days"):  # noqa: SIM117
+        with patch.object(transport, "_get_or_create_stub") as mock_stub:
+            mock_stub.return_value = MagicMock()
+            transport.unary_unary(method, {"test": "data"}, call_options)
+
+
+@pytest.mark.asyncio
+async def test_grpc_async_timeout_validation():
+    """Test timeout validation for async gRPC transport."""
+    # Skip if grpc not available
+    pytest.importorskip("grpc")
+
+    transport = GrpcTransportAsync("localhost:50051")
+
+    method = MethodInfo(
+        name="TestMethod",
+        service_name="TestService",
+        input=type("TestInput", (), {}),
+        output=type("TestOutput", (), {}),
+        idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+    )
+
+    # Test valid 50-day timeout (within 100-day limit)
+    call_options = CallOptions(timeout_ms=4320000000)  # 50 days
+
+    from unittest.mock import AsyncMock, patch  # noqa: PLC0415
+
+    with patch.object(transport, "_get_or_create_stub") as mock_stub:
+        mock_call = AsyncMock(return_value={"result": "ok"})
+        mock_stub.return_value = mock_call
+
+        result = await transport.unary_unary(method, {"test": "data"}, call_options)
+        assert result == {"result": "ok"}
+
+        # Verify timeout was passed correctly
+        mock_call.assert_called_once()
+        _, kwargs = mock_call.call_args
+        assert kwargs["timeout"] == 4320000.0  # 50 days in seconds
+
+    # Test edge case: exactly 100 days
+    call_options = CallOptions(timeout_ms=8640000000)
+
+    with patch.object(transport, "_get_or_create_stub") as mock_stub:
+        mock_call = AsyncMock(return_value={"result": "ok"})
+        mock_stub.return_value = mock_call
+
+        result = await transport.unary_unary(method, {"test": "data"}, call_options)
+        assert result == {"result": "ok"}
+
+    # Test negative timeout
+    call_options = CallOptions(timeout_ms=-1000)
+
+    with pytest.raises(ValueError, match="must be positive"):  # noqa: SIM117
+        with patch.object(transport, "_get_or_create_stub") as mock_stub:
+            mock_stub.return_value = AsyncMock()
+            await transport.unary_unary(method, {"test": "data"}, call_options)
+
+
+def test_none_timeout_means_infinite():
+    """Test that None timeout means infinite timeout (no timeout)."""
+    transport = ConnectTransport("http://localhost:3000")
+
+    method = MethodInfo(
+        name="TestMethod",
+        service_name="TestService",
+        input=type("TestInput", (), {}),
+        output=type("TestOutput", (), {}),
+        idempotency_level=IdempotencyLevel.NO_SIDE_EFFECTS,
+    )
+
+    # CallOptions with no timeout specified
+    call_options = CallOptions()
+    assert call_options.timeout_ms is None
+
+    from unittest.mock import patch  # noqa: PLC0415
+
+    with patch.object(transport._client, "execute_unary") as mock:
+        mock.return_value = {"result": "ok"}
+        transport.unary_unary(method, {"test": "data"}, call_options)
+
+    # Verify that None was passed as timeout (infinite timeout)
+    mock.assert_called_once()
+    _, kwargs = mock.call_args
+    assert kwargs.get("timeout_ms") is None
diff --git a/uv.lock b/uv.lock
index 84c2207..b6cf477 100644
--- a/uv.lock
+++ b/uv.lock
@@ -251,6 +251,14 @@ dependencies = [
     { name = "protobuf" },
 ]
 
+[package.optional-dependencies]
+grpc = [
+    { name = "grpcio" },
+]
+types = [
+    { name = "types-grpcio" },
+]
+
 [package.dev-dependencies]
 dev = [
     { name = "asgiref" },
@@ -268,15 +276,19 @@ dev = [
     { name = "pytest-asyncio" },
     { name = "pytest-cov" },
     { name = "ruff" },
+    { name = "types-grpcio" },
     { name = "typing-extensions" },
     { name = "zstandard" },
 ]
 
 [package.metadata]
 requires-dist = [
+    { name = "grpcio", marker = "extra == 'grpc'", specifier = ">=1.50.0" },
     { name = "httpx" },
     { name = "protobuf", specifier = ">=5.28" },
+    { name = "types-grpcio", marker = "extra == 'types'" },
 ]
+provides-extras = ["grpc", "types"]
 
 [package.metadata.requires-dev]
 dev = [
@@ -295,6 +307,7 @@ dev = [
     { name = "pytest-asyncio", specifier = ">=0.25.2" },
     { name = "pytest-cov" },
     { name = "ruff", specifier = ">=0.9.4" },
+    { name = "types-grpcio" },
     { name = "typing-extensions" },
     { name = "zstandard" },
 ]
@@ -368,87 +381,87 @@ requires-dist = [
 
 [[package]]
 name = "coverage"
-version = "7.10.3"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f4/2c/253cc41cd0f40b84c1c34c5363e0407d73d4a1cae005fed6db3b823175bd/coverage-7.10.3.tar.gz", hash = "sha256:812ba9250532e4a823b070b0420a36499859542335af3dca8f47fc6aa1a05619", size = 822936, upload-time = "2025-08-10T21:27:39.968Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/2f/44/e14576c34b37764c821866909788ff7463228907ab82bae188dab2b421f1/coverage-7.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53808194afdf948c462215e9403cca27a81cf150d2f9b386aee4dab614ae2ffe", size = 215964, upload-time = "2025-08-10T21:25:22.828Z" },
-    { url = "https://files.pythonhosted.org/packages/e6/15/f4f92d9b83100903efe06c9396ee8d8bdba133399d37c186fc5b16d03a87/coverage-7.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f4d1b837d1abf72187a61645dbf799e0d7705aa9232924946e1f57eb09a3bf00", size = 216361, upload-time = "2025-08-10T21:25:25.603Z" },
-    { url = "https://files.pythonhosted.org/packages/e9/3a/c92e8cd5e89acc41cfc026dfb7acedf89661ce2ea1ee0ee13aacb6b2c20c/coverage-7.10.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2a90dd4505d3cc68b847ab10c5ee81822a968b5191664e8a0801778fa60459fa", size = 243115, upload-time = "2025-08-10T21:25:27.09Z" },
-    { url = "https://files.pythonhosted.org/packages/23/53/c1d8c2778823b1d95ca81701bb8f42c87dc341a2f170acdf716567523490/coverage-7.10.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d52989685ff5bf909c430e6d7f6550937bc6d6f3e6ecb303c97a86100efd4596", size = 244927, upload-time = "2025-08-10T21:25:28.77Z" },
-    { url = "https://files.pythonhosted.org/packages/79/41/1e115fd809031f432b4ff8e2ca19999fb6196ab95c35ae7ad5e07c001130/coverage-7.10.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdb558a1d97345bde3a9f4d3e8d11c9e5611f748646e9bb61d7d612a796671b5", size = 246784, upload-time = "2025-08-10T21:25:30.195Z" },
-    { url = "https://files.pythonhosted.org/packages/c7/b2/0eba9bdf8f1b327ae2713c74d4b7aa85451bb70622ab4e7b8c000936677c/coverage-7.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c9e6331a8f09cb1fc8bda032752af03c366870b48cce908875ba2620d20d0ad4", size = 
244828, upload-time = "2025-08-10T21:25:31.785Z" }, - { url = "https://files.pythonhosted.org/packages/1f/cc/74c56b6bf71f2a53b9aa3df8bc27163994e0861c065b4fe3a8ac290bed35/coverage-7.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:992f48bf35b720e174e7fae916d943599f1a66501a2710d06c5f8104e0756ee1", size = 242844, upload-time = "2025-08-10T21:25:33.37Z" }, - { url = "https://files.pythonhosted.org/packages/b6/7b/ac183fbe19ac5596c223cb47af5737f4437e7566100b7e46cc29b66695a5/coverage-7.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c5595fc4ad6a39312c786ec3326d7322d0cf10e3ac6a6df70809910026d67cfb", size = 243721, upload-time = "2025-08-10T21:25:34.939Z" }, - { url = "https://files.pythonhosted.org/packages/57/96/cb90da3b5a885af48f531905234a1e7376acfc1334242183d23154a1c285/coverage-7.10.3-cp310-cp310-win32.whl", hash = "sha256:9e92fa1f2bd5a57df9d00cf9ce1eb4ef6fccca4ceabec1c984837de55329db34", size = 218481, upload-time = "2025-08-10T21:25:36.935Z" }, - { url = "https://files.pythonhosted.org/packages/15/67/1ba4c7d75745c4819c54a85766e0a88cc2bff79e1760c8a2debc34106dc2/coverage-7.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:b96524d6e4a3ce6a75c56bb15dbd08023b0ae2289c254e15b9fbdddf0c577416", size = 219382, upload-time = "2025-08-10T21:25:38.267Z" }, - { url = "https://files.pythonhosted.org/packages/87/04/810e506d7a19889c244d35199cbf3239a2f952b55580aa42ca4287409424/coverage-7.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2ff2e2afdf0d51b9b8301e542d9c21a8d084fd23d4c8ea2b3a1b3c96f5f7397", size = 216075, upload-time = "2025-08-10T21:25:39.891Z" }, - { url = "https://files.pythonhosted.org/packages/2e/50/6b3fbab034717b4af3060bdaea6b13dfdc6b1fad44b5082e2a95cd378a9a/coverage-7.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:18ecc5d1b9a8c570f6c9b808fa9a2b16836b3dd5414a6d467ae942208b095f85", size = 216476, upload-time = "2025-08-10T21:25:41.137Z" }, - { url = "https://files.pythonhosted.org/packages/c7/96/4368c624c1ed92659812b63afc76c492be7867ac8e64b7190b88bb26d43c/coverage-7.10.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1af4461b25fe92889590d438905e1fc79a95680ec2a1ff69a591bb3fdb6c7157", size = 246865, upload-time = "2025-08-10T21:25:42.408Z" }, - { url = "https://files.pythonhosted.org/packages/34/12/5608f76070939395c17053bf16e81fd6c06cf362a537ea9d07e281013a27/coverage-7.10.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3966bc9a76b09a40dc6063c8b10375e827ea5dfcaffae402dd65953bef4cba54", size = 248800, upload-time = "2025-08-10T21:25:44.098Z" }, - { url = "https://files.pythonhosted.org/packages/ce/52/7cc90c448a0ad724283cbcdfd66b8d23a598861a6a22ac2b7b8696491798/coverage-7.10.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:205a95b87ef4eb303b7bc5118b47b6b6604a644bcbdb33c336a41cfc0a08c06a", size = 250904, upload-time = "2025-08-10T21:25:45.384Z" }, - { url = "https://files.pythonhosted.org/packages/e6/70/9967b847063c1c393b4f4d6daab1131558ebb6b51f01e7df7150aa99f11d/coverage-7.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b3801b79fb2ad61e3c7e2554bab754fc5f105626056980a2b9cf3aef4f13f84", size = 248597, upload-time = "2025-08-10T21:25:47.059Z" }, - { url = "https://files.pythonhosted.org/packages/2d/fe/263307ce6878b9ed4865af42e784b42bb82d066bcf10f68defa42931c2c7/coverage-7.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0dc69c60224cda33d384572da945759756e3f06b9cdac27f302f53961e63160", size = 246647, 
upload-time = "2025-08-10T21:25:48.334Z" }, - { url = "https://files.pythonhosted.org/packages/8e/27/d27af83ad162eba62c4eb7844a1de6cf7d9f6b185df50b0a3514a6f80ddd/coverage-7.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a83d4f134bab2c7ff758e6bb1541dd72b54ba295ced6a63d93efc2e20cb9b124", size = 247290, upload-time = "2025-08-10T21:25:49.945Z" }, - { url = "https://files.pythonhosted.org/packages/28/83/904ff27e15467a5622dbe9ad2ed5831b4a616a62570ec5924d06477dff5a/coverage-7.10.3-cp311-cp311-win32.whl", hash = "sha256:54e409dd64e5302b2a8fdf44ec1c26f47abd1f45a2dcf67bd161873ee05a59b8", size = 218521, upload-time = "2025-08-10T21:25:51.208Z" }, - { url = "https://files.pythonhosted.org/packages/b8/29/bc717b8902faaccf0ca486185f0dcab4778561a529dde51cb157acaafa16/coverage-7.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:30c601610a9b23807c5e9e2e442054b795953ab85d525c3de1b1b27cebeb2117", size = 219412, upload-time = "2025-08-10T21:25:52.494Z" }, - { url = "https://files.pythonhosted.org/packages/7b/7a/5a1a7028c11bb589268c656c6b3f2bbf06e0aced31bbdf7a4e94e8442cc0/coverage-7.10.3-cp311-cp311-win_arm64.whl", hash = "sha256:dabe662312a97958e932dee056f2659051d822552c0b866823e8ba1c2fe64770", size = 218091, upload-time = "2025-08-10T21:25:54.102Z" }, - { url = "https://files.pythonhosted.org/packages/b8/62/13c0b66e966c43d7aa64dadc8cd2afa1f5a2bf9bb863bdabc21fb94e8b63/coverage-7.10.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:449c1e2d3a84d18bd204258a897a87bc57380072eb2aded6a5b5226046207b42", size = 216262, upload-time = "2025-08-10T21:25:55.367Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f0/59fdf79be7ac2f0206fc739032f482cfd3f66b18f5248108ff192741beae/coverage-7.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d4f9ce50b9261ad196dc2b2e9f1fbbee21651b54c3097a25ad783679fd18294", size = 216496, upload-time = "2025-08-10T21:25:56.759Z" }, - { url = "https://files.pythonhosted.org/packages/34/b1/bc83788ba31bde6a0c02eb96bbc14b2d1eb083ee073beda18753fa2c4c66/coverage-7.10.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4dd4564207b160d0d45c36a10bc0a3d12563028e8b48cd6459ea322302a156d7", size = 247989, upload-time = "2025-08-10T21:25:58.067Z" }, - { url = "https://files.pythonhosted.org/packages/0c/29/f8bdf88357956c844bd872e87cb16748a37234f7f48c721dc7e981145eb7/coverage-7.10.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5ca3c9530ee072b7cb6a6ea7b640bcdff0ad3b334ae9687e521e59f79b1d0437", size = 250738, upload-time = "2025-08-10T21:25:59.406Z" }, - { url = "https://files.pythonhosted.org/packages/ae/df/6396301d332b71e42bbe624670af9376f63f73a455cc24723656afa95796/coverage-7.10.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b6df359e59fa243c9925ae6507e27f29c46698359f45e568fd51b9315dbbe587", size = 251868, upload-time = "2025-08-10T21:26:00.65Z" }, - { url = "https://files.pythonhosted.org/packages/91/21/d760b2df6139b6ef62c9cc03afb9bcdf7d6e36ed4d078baacffa618b4c1c/coverage-7.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a181e4c2c896c2ff64c6312db3bda38e9ade2e1aa67f86a5628ae85873786cea", size = 249790, upload-time = "2025-08-10T21:26:02.009Z" }, - { url = "https://files.pythonhosted.org/packages/69/91/5dcaa134568202397fa4023d7066d4318dc852b53b428052cd914faa05e1/coverage-7.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a374d4e923814e8b72b205ef6b3d3a647bb50e66f3558582eda074c976923613", size = 247907, upload-time = 
"2025-08-10T21:26:03.757Z" }, - { url = "https://files.pythonhosted.org/packages/38/ed/70c0e871cdfef75f27faceada461206c1cc2510c151e1ef8d60a6fedda39/coverage-7.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:daeefff05993e5e8c6e7499a8508e7bd94502b6b9a9159c84fd1fe6bce3151cb", size = 249344, upload-time = "2025-08-10T21:26:05.11Z" }, - { url = "https://files.pythonhosted.org/packages/5f/55/c8a273ed503cedc07f8a00dcd843daf28e849f0972e4c6be4c027f418ad6/coverage-7.10.3-cp312-cp312-win32.whl", hash = "sha256:187ecdcac21f9636d570e419773df7bd2fda2e7fa040f812e7f95d0bddf5f79a", size = 218693, upload-time = "2025-08-10T21:26:06.534Z" }, - { url = "https://files.pythonhosted.org/packages/94/58/dd3cfb2473b85be0b6eb8c5b6d80b6fc3f8f23611e69ef745cef8cf8bad5/coverage-7.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:4a50ad2524ee7e4c2a95e60d2b0b83283bdfc745fe82359d567e4f15d3823eb5", size = 219501, upload-time = "2025-08-10T21:26:08.195Z" }, - { url = "https://files.pythonhosted.org/packages/56/af/7cbcbf23d46de6f24246e3f76b30df099d05636b30c53c158a196f7da3ad/coverage-7.10.3-cp312-cp312-win_arm64.whl", hash = "sha256:c112f04e075d3495fa3ed2200f71317da99608cbb2e9345bdb6de8819fc30571", size = 218135, upload-time = "2025-08-10T21:26:09.584Z" }, - { url = "https://files.pythonhosted.org/packages/0a/ff/239e4de9cc149c80e9cc359fab60592365b8c4cbfcad58b8a939d18c6898/coverage-7.10.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b99e87304ffe0eb97c5308447328a584258951853807afdc58b16143a530518a", size = 216298, upload-time = "2025-08-10T21:26:10.973Z" }, - { url = "https://files.pythonhosted.org/packages/56/da/28717da68f8ba68f14b9f558aaa8f3e39ada8b9a1ae4f4977c8f98b286d5/coverage-7.10.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4af09c7574d09afbc1ea7da9dcea23665c01f3bc1b1feb061dac135f98ffc53a", size = 216546, upload-time = "2025-08-10T21:26:12.616Z" }, - { url = "https://files.pythonhosted.org/packages/de/bb/e1ade16b9e3f2d6c323faeb6bee8e6c23f3a72760a5d9af102ef56a656cb/coverage-7.10.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:488e9b50dc5d2aa9521053cfa706209e5acf5289e81edc28291a24f4e4488f46", size = 247538, upload-time = "2025-08-10T21:26:14.455Z" }, - { url = "https://files.pythonhosted.org/packages/ea/2f/6ae1db51dc34db499bfe340e89f79a63bd115fc32513a7bacdf17d33cd86/coverage-7.10.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:913ceddb4289cbba3a310704a424e3fb7aac2bc0c3a23ea473193cb290cf17d4", size = 250141, upload-time = "2025-08-10T21:26:15.787Z" }, - { url = "https://files.pythonhosted.org/packages/4f/ed/33efd8819895b10c66348bf26f011dd621e804866c996ea6893d682218df/coverage-7.10.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b1f91cbc78c7112ab84ed2a8defbccd90f888fcae40a97ddd6466b0bec6ae8a", size = 251415, upload-time = "2025-08-10T21:26:17.535Z" }, - { url = "https://files.pythonhosted.org/packages/26/04/cb83826f313d07dc743359c9914d9bc460e0798da9a0e38b4f4fabc207ed/coverage-7.10.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0bac054d45af7cd938834b43a9878b36ea92781bcb009eab040a5b09e9927e3", size = 249575, upload-time = "2025-08-10T21:26:18.921Z" }, - { url = "https://files.pythonhosted.org/packages/2d/fd/ae963c7a8e9581c20fa4355ab8940ca272554d8102e872dbb932a644e410/coverage-7.10.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fe72cbdd12d9e0f4aca873fa6d755e103888a7f9085e4a62d282d9d5b9f7928c", size = 247466, upload-time = 
"2025-08-10T21:26:20.263Z" }, - { url = "https://files.pythonhosted.org/packages/99/e8/b68d1487c6af370b8d5ef223c6d7e250d952c3acfbfcdbf1a773aa0da9d2/coverage-7.10.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c1e2e927ab3eadd7c244023927d646e4c15c65bb2ac7ae3c3e9537c013700d21", size = 249084, upload-time = "2025-08-10T21:26:21.638Z" }, - { url = "https://files.pythonhosted.org/packages/66/4d/a0bcb561645c2c1e21758d8200443669d6560d2a2fb03955291110212ec4/coverage-7.10.3-cp313-cp313-win32.whl", hash = "sha256:24d0c13de473b04920ddd6e5da3c08831b1170b8f3b17461d7429b61cad59ae0", size = 218735, upload-time = "2025-08-10T21:26:23.009Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c3/78b4adddbc0feb3b223f62761e5f9b4c5a758037aaf76e0a5845e9e35e48/coverage-7.10.3-cp313-cp313-win_amd64.whl", hash = "sha256:3564aae76bce4b96e2345cf53b4c87e938c4985424a9be6a66ee902626edec4c", size = 219531, upload-time = "2025-08-10T21:26:24.474Z" }, - { url = "https://files.pythonhosted.org/packages/70/1b/1229c0b2a527fa5390db58d164aa896d513a1fbb85a1b6b6676846f00552/coverage-7.10.3-cp313-cp313-win_arm64.whl", hash = "sha256:f35580f19f297455f44afcd773c9c7a058e52eb6eb170aa31222e635f2e38b87", size = 218162, upload-time = "2025-08-10T21:26:25.847Z" }, - { url = "https://files.pythonhosted.org/packages/fc/26/1c1f450e15a3bf3eaecf053ff64538a2612a23f05b21d79ce03be9ff5903/coverage-7.10.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07009152f497a0464ffdf2634586787aea0e69ddd023eafb23fc38267db94b84", size = 217003, upload-time = "2025-08-10T21:26:27.231Z" }, - { url = "https://files.pythonhosted.org/packages/29/96/4b40036181d8c2948454b458750960956a3c4785f26a3c29418bbbee1666/coverage-7.10.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd2ba5f0c7e7e8cc418be2f0c14c4d9e3f08b8fb8e4c0f83c2fe87d03eb655e", size = 217238, upload-time = "2025-08-10T21:26:28.83Z" }, - { url = "https://files.pythonhosted.org/packages/62/23/8dfc52e95da20957293fb94d97397a100e63095ec1e0ef5c09dd8c6f591a/coverage-7.10.3-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1ae22b97003c74186e034a93e4f946c75fad8c0ce8d92fbbc168b5e15ee2841f", size = 258561, upload-time = "2025-08-10T21:26:30.475Z" }, - { url = "https://files.pythonhosted.org/packages/59/95/00e7fcbeda3f632232f4c07dde226afe3511a7781a000aa67798feadc535/coverage-7.10.3-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:eb329f1046888a36b1dc35504d3029e1dd5afe2196d94315d18c45ee380f67d5", size = 260735, upload-time = "2025-08-10T21:26:32.333Z" }, - { url = "https://files.pythonhosted.org/packages/9e/4c/f4666cbc4571804ba2a65b078ff0de600b0b577dc245389e0bc9b69ae7ca/coverage-7.10.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce01048199a91f07f96ca3074b0c14021f4fe7ffd29a3e6a188ac60a5c3a4af8", size = 262960, upload-time = "2025-08-10T21:26:33.701Z" }, - { url = "https://files.pythonhosted.org/packages/c1/a5/8a9e8a7b12a290ed98b60f73d1d3e5e9ced75a4c94a0d1a671ce3ddfff2a/coverage-7.10.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:08b989a06eb9dfacf96d42b7fb4c9a22bafa370d245dc22fa839f2168c6f9fa1", size = 260515, upload-time = "2025-08-10T21:26:35.16Z" }, - { url = "https://files.pythonhosted.org/packages/86/11/bb59f7f33b2cac0c5b17db0d9d0abba9c90d9eda51a6e727b43bd5fce4ae/coverage-7.10.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:669fe0d4e69c575c52148511029b722ba8d26e8a3129840c2ce0522e1452b256", size = 258278, upload-time = 
"2025-08-10T21:26:36.539Z" }, - { url = "https://files.pythonhosted.org/packages/cc/22/3646f8903743c07b3e53fded0700fed06c580a980482f04bf9536657ac17/coverage-7.10.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3262d19092771c83f3413831d9904b1ccc5f98da5de4ffa4ad67f5b20c7aaf7b", size = 259408, upload-time = "2025-08-10T21:26:37.954Z" }, - { url = "https://files.pythonhosted.org/packages/d2/5c/6375e9d905da22ddea41cd85c30994b8b6f6c02e44e4c5744b76d16b026f/coverage-7.10.3-cp313-cp313t-win32.whl", hash = "sha256:cc0ee4b2ccd42cab7ee6be46d8a67d230cb33a0a7cd47a58b587a7063b6c6b0e", size = 219396, upload-time = "2025-08-10T21:26:39.426Z" }, - { url = "https://files.pythonhosted.org/packages/33/3b/7da37fd14412b8c8b6e73c3e7458fef6b1b05a37f990a9776f88e7740c89/coverage-7.10.3-cp313-cp313t-win_amd64.whl", hash = "sha256:03db599f213341e2960430984e04cf35fb179724e052a3ee627a068653cf4a7c", size = 220458, upload-time = "2025-08-10T21:26:40.905Z" }, - { url = "https://files.pythonhosted.org/packages/28/cc/59a9a70f17edab513c844ee7a5c63cf1057041a84cc725b46a51c6f8301b/coverage-7.10.3-cp313-cp313t-win_arm64.whl", hash = "sha256:46eae7893ba65f53c71284585a262f083ef71594f05ec5c85baf79c402369098", size = 218722, upload-time = "2025-08-10T21:26:42.362Z" }, - { url = "https://files.pythonhosted.org/packages/2d/84/bb773b51a06edbf1231b47dc810a23851f2796e913b335a0fa364773b842/coverage-7.10.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:bce8b8180912914032785850d8f3aacb25ec1810f5f54afc4a8b114e7a9b55de", size = 216280, upload-time = "2025-08-10T21:26:44.132Z" }, - { url = "https://files.pythonhosted.org/packages/92/a8/4d8ca9c111d09865f18d56facff64d5fa076a5593c290bd1cfc5dceb8dba/coverage-7.10.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:07790b4b37d56608536f7c1079bd1aa511567ac2966d33d5cec9cf520c50a7c8", size = 216557, upload-time = "2025-08-10T21:26:45.598Z" }, - { url = "https://files.pythonhosted.org/packages/fe/b2/eb668bfc5060194bc5e1ccd6f664e8e045881cfee66c42a2aa6e6c5b26e8/coverage-7.10.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e79367ef2cd9166acedcbf136a458dfe9a4a2dd4d1ee95738fb2ee581c56f667", size = 247598, upload-time = "2025-08-10T21:26:47.081Z" }, - { url = "https://files.pythonhosted.org/packages/fd/b0/9faa4ac62c8822219dd83e5d0e73876398af17d7305968aed8d1606d1830/coverage-7.10.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:419d2a0f769f26cb1d05e9ccbc5eab4cb5d70231604d47150867c07822acbdf4", size = 250131, upload-time = "2025-08-10T21:26:48.65Z" }, - { url = "https://files.pythonhosted.org/packages/4e/90/203537e310844d4bf1bdcfab89c1e05c25025c06d8489b9e6f937ad1a9e2/coverage-7.10.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee221cf244757cdc2ac882e3062ab414b8464ad9c884c21e878517ea64b3fa26", size = 251485, upload-time = "2025-08-10T21:26:50.368Z" }, - { url = "https://files.pythonhosted.org/packages/b9/b2/9d894b26bc53c70a1fe503d62240ce6564256d6d35600bdb86b80e516e7d/coverage-7.10.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c2079d8cdd6f7373d628e14b3357f24d1db02c9dc22e6a007418ca7a2be0435a", size = 249488, upload-time = "2025-08-10T21:26:52.045Z" }, - { url = "https://files.pythonhosted.org/packages/b4/28/af167dbac5281ba6c55c933a0ca6675d68347d5aee39cacc14d44150b922/coverage-7.10.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:bd8df1f83c0703fa3ca781b02d36f9ec67ad9cb725b18d486405924f5e4270bd", size = 247419, upload-time = 
"2025-08-10T21:26:53.533Z" }, - { url = "https://files.pythonhosted.org/packages/f4/1c/9a4ddc9f0dcb150d4cd619e1c4bb39bcf694c6129220bdd1e5895d694dda/coverage-7.10.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6b4e25e0fa335c8aa26e42a52053f3786a61cc7622b4d54ae2dad994aa754fec", size = 248917, upload-time = "2025-08-10T21:26:55.11Z" }, - { url = "https://files.pythonhosted.org/packages/92/27/c6a60c7cbe10dbcdcd7fc9ee89d531dc04ea4c073800279bb269954c5a9f/coverage-7.10.3-cp314-cp314-win32.whl", hash = "sha256:d7c3d02c2866deb217dce664c71787f4b25420ea3eaf87056f44fb364a3528f5", size = 218999, upload-time = "2025-08-10T21:26:56.637Z" }, - { url = "https://files.pythonhosted.org/packages/36/09/a94c1369964ab31273576615d55e7d14619a1c47a662ed3e2a2fe4dee7d4/coverage-7.10.3-cp314-cp314-win_amd64.whl", hash = "sha256:9c8916d44d9e0fe6cdb2227dc6b0edd8bc6c8ef13438bbbf69af7482d9bb9833", size = 219801, upload-time = "2025-08-10T21:26:58.207Z" }, - { url = "https://files.pythonhosted.org/packages/23/59/f5cd2a80f401c01cf0f3add64a7b791b7d53fd6090a4e3e9ea52691cf3c4/coverage-7.10.3-cp314-cp314-win_arm64.whl", hash = "sha256:1007d6a2b3cf197c57105cc1ba390d9ff7f0bee215ced4dea530181e49c65ab4", size = 218381, upload-time = "2025-08-10T21:26:59.707Z" }, - { url = "https://files.pythonhosted.org/packages/73/3d/89d65baf1ea39e148ee989de6da601469ba93c1d905b17dfb0b83bd39c96/coverage-7.10.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ebc8791d346410d096818788877d675ca55c91db87d60e8f477bd41c6970ffc6", size = 217019, upload-time = "2025-08-10T21:27:01.242Z" }, - { url = "https://files.pythonhosted.org/packages/7d/7d/d9850230cd9c999ce3a1e600f85c2fff61a81c301334d7a1faa1a5ba19c8/coverage-7.10.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f4e4d8e75f6fd3c6940ebeed29e3d9d632e1f18f6fb65d33086d99d4d073241", size = 217237, upload-time = "2025-08-10T21:27:03.442Z" }, - { url = "https://files.pythonhosted.org/packages/36/51/b87002d417202ab27f4a1cd6bd34ee3b78f51b3ddbef51639099661da991/coverage-7.10.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:24581ed69f132b6225a31b0228ae4885731cddc966f8a33fe5987288bdbbbd5e", size = 258735, upload-time = "2025-08-10T21:27:05.124Z" }, - { url = "https://files.pythonhosted.org/packages/1c/02/1f8612bfcb46fc7ca64a353fff1cd4ed932bb6e0b4e0bb88b699c16794b8/coverage-7.10.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec151569ddfccbf71bac8c422dce15e176167385a00cd86e887f9a80035ce8a5", size = 260901, upload-time = "2025-08-10T21:27:06.68Z" }, - { url = "https://files.pythonhosted.org/packages/aa/3a/fe39e624ddcb2373908bd922756384bb70ac1c5009b0d1674eb326a3e428/coverage-7.10.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2ae8e7c56290b908ee817200c0b65929b8050bc28530b131fe7c6dfee3e7d86b", size = 263157, upload-time = "2025-08-10T21:27:08.398Z" }, - { url = "https://files.pythonhosted.org/packages/5e/89/496b6d5a10fa0d0691a633bb2b2bcf4f38f0bdfcbde21ad9e32d1af328ed/coverage-7.10.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fb742309766d7e48e9eb4dc34bc95a424707bc6140c0e7d9726e794f11b92a0", size = 260597, upload-time = "2025-08-10T21:27:10.237Z" }, - { url = "https://files.pythonhosted.org/packages/b6/a6/8b5bf6a9e8c6aaeb47d5fe9687014148efc05c3588110246d5fdeef9b492/coverage-7.10.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:c65e2a5b32fbe1e499f1036efa6eb9cb4ea2bf6f7168d0e7a5852f3024f471b1", size = 258353, upload-time = 
"2025-08-10T21:27:11.773Z" }, - { url = "https://files.pythonhosted.org/packages/c3/6d/ad131be74f8afd28150a07565dfbdc86592fd61d97e2dc83383d9af219f0/coverage-7.10.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d48d2cb07d50f12f4f18d2bb75d9d19e3506c26d96fffabf56d22936e5ed8f7c", size = 259504, upload-time = "2025-08-10T21:27:13.254Z" }, - { url = "https://files.pythonhosted.org/packages/ec/30/fc9b5097092758cba3375a8cc4ff61774f8cd733bcfb6c9d21a60077a8d8/coverage-7.10.3-cp314-cp314t-win32.whl", hash = "sha256:dec0d9bc15ee305e09fe2cd1911d3f0371262d3cfdae05d79515d8cb712b4869", size = 219782, upload-time = "2025-08-10T21:27:14.736Z" }, - { url = "https://files.pythonhosted.org/packages/72/9b/27fbf79451b1fac15c4bda6ec6e9deae27cf7c0648c1305aa21a3454f5c4/coverage-7.10.3-cp314-cp314t-win_amd64.whl", hash = "sha256:424ea93a323aa0f7f01174308ea78bde885c3089ec1bef7143a6d93c3e24ef64", size = 220898, upload-time = "2025-08-10T21:27:16.297Z" }, - { url = "https://files.pythonhosted.org/packages/d1/cf/a32bbf92869cbf0b7c8b84325327bfc718ad4b6d2c63374fef3d58e39306/coverage-7.10.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f5983c132a62d93d71c9ef896a0b9bf6e6828d8d2ea32611f58684fba60bba35", size = 218922, upload-time = "2025-08-10T21:27:18.22Z" }, - { url = "https://files.pythonhosted.org/packages/84/19/e67f4ae24e232c7f713337f3f4f7c9c58afd0c02866fb07c7b9255a19ed7/coverage-7.10.3-py3-none-any.whl", hash = "sha256:416a8d74dc0adfd33944ba2f405897bab87b7e9e84a391e09d241956bd953ce1", size = 207921, upload-time = "2025-08-10T21:27:38.254Z" }, +version = "7.10.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/61/83/153f54356c7c200013a752ce1ed5448573dca546ce125801afca9e1ac1a4/coverage-7.10.5.tar.gz", hash = "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6", size = 821662, upload-time = "2025-08-23T14:42:44.78Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/70/e77b0061a6c7157bfce645c6b9a715a08d4c86b3360a7b3252818080b817/coverage-7.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c6a5c3414bfc7451b879141ce772c546985163cf553f08e0f135f0699a911801", size = 216774, upload-time = "2025-08-23T14:40:26.301Z" }, + { url = "https://files.pythonhosted.org/packages/91/08/2a79de5ecf37ee40f2d898012306f11c161548753391cec763f92647837b/coverage-7.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bc8e4d99ce82f1710cc3c125adc30fd1487d3cf6c2cd4994d78d68a47b16989a", size = 217175, upload-time = "2025-08-23T14:40:29.142Z" }, + { url = "https://files.pythonhosted.org/packages/64/57/0171d69a699690149a6ba6a4eb702814448c8d617cf62dbafa7ce6bfdf63/coverage-7.10.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:02252dc1216e512a9311f596b3169fad54abcb13827a8d76d5630c798a50a754", size = 243931, upload-time = "2025-08-23T14:40:30.735Z" }, + { url = "https://files.pythonhosted.org/packages/15/06/3a67662c55656702bd398a727a7f35df598eb11104fcb34f1ecbb070291a/coverage-7.10.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:73269df37883e02d460bee0cc16be90509faea1e3bd105d77360b512d5bb9c33", size = 245740, upload-time = "2025-08-23T14:40:32.302Z" }, + { url = "https://files.pythonhosted.org/packages/00/f4/f8763aabf4dc30ef0d0012522d312f0b7f9fede6246a1f27dbcc4a1e523c/coverage-7.10.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f8a81b0614642f91c9effd53eec284f965577591f51f547a1cbeb32035b4c2f", size = 
247600, upload-time = "2025-08-23T14:40:33.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/31/6632219a9065e1b83f77eda116fed4c76fb64908a6a9feae41816dab8237/coverage-7.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6a29f8e0adb7f8c2b95fa2d4566a1d6e6722e0a637634c6563cb1ab844427dd9", size = 245640, upload-time = "2025-08-23T14:40:35.248Z" }, + { url = "https://files.pythonhosted.org/packages/6e/e2/3dba9b86037b81649b11d192bb1df11dde9a81013e434af3520222707bc8/coverage-7.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fcf6ab569436b4a647d4e91accba12509ad9f2554bc93d3aee23cc596e7f99c3", size = 243659, upload-time = "2025-08-23T14:40:36.815Z" }, + { url = "https://files.pythonhosted.org/packages/02/b9/57170bd9f3e333837fc24ecc88bc70fbc2eb7ccfd0876854b0c0407078c3/coverage-7.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:90dc3d6fb222b194a5de60af8d190bedeeddcbc7add317e4a3cd333ee6b7c879", size = 244537, upload-time = "2025-08-23T14:40:38.737Z" }, + { url = "https://files.pythonhosted.org/packages/b3/1c/93ac36ef1e8b06b8d5777393a3a40cb356f9f3dab980be40a6941e443588/coverage-7.10.5-cp310-cp310-win32.whl", hash = "sha256:414a568cd545f9dc75f0686a0049393de8098414b58ea071e03395505b73d7a8", size = 219285, upload-time = "2025-08-23T14:40:40.342Z" }, + { url = "https://files.pythonhosted.org/packages/30/95/23252277e6e5fe649d6cd3ed3f35d2307e5166de4e75e66aa7f432abc46d/coverage-7.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:e551f9d03347196271935fd3c0c165f0e8c049220280c1120de0084d65e9c7ff", size = 220185, upload-time = "2025-08-23T14:40:42.026Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f2/336d34d2fc1291ca7c18eeb46f64985e6cef5a1a7ef6d9c23720c6527289/coverage-7.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c177e6ffe2ebc7c410785307758ee21258aa8e8092b44d09a2da767834f075f2", size = 216890, upload-time = "2025-08-23T14:40:43.627Z" }, + { url = "https://files.pythonhosted.org/packages/39/ea/92448b07cc1cf2b429d0ce635f59cf0c626a5d8de21358f11e92174ff2a6/coverage-7.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14d6071c51ad0f703d6440827eaa46386169b5fdced42631d5a5ac419616046f", size = 217287, upload-time = "2025-08-23T14:40:45.214Z" }, + { url = "https://files.pythonhosted.org/packages/96/ba/ad5b36537c5179c808d0ecdf6e4aa7630b311b3c12747ad624dcd43a9b6b/coverage-7.10.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:61f78c7c3bc272a410c5ae3fde7792b4ffb4acc03d35a7df73ca8978826bb7ab", size = 247683, upload-time = "2025-08-23T14:40:46.791Z" }, + { url = "https://files.pythonhosted.org/packages/28/e5/fe3bbc8d097029d284b5fb305b38bb3404895da48495f05bff025df62770/coverage-7.10.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f39071caa126f69d63f99b324fb08c7b1da2ec28cbb1fe7b5b1799926492f65c", size = 249614, upload-time = "2025-08-23T14:40:48.082Z" }, + { url = "https://files.pythonhosted.org/packages/69/9c/a1c89a8c8712799efccb32cd0a1ee88e452f0c13a006b65bb2271f1ac767/coverage-7.10.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343a023193f04d46edc46b2616cdbee68c94dd10208ecd3adc56fcc54ef2baa1", size = 251719, upload-time = "2025-08-23T14:40:49.349Z" }, + { url = "https://files.pythonhosted.org/packages/e9/be/5576b5625865aa95b5633315f8f4142b003a70c3d96e76f04487c3b5cc95/coverage-7.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:585ffe93ae5894d1ebdee69fc0b0d4b7c75d8007983692fb300ac98eed146f78", size = 249411, 
upload-time = "2025-08-23T14:40:50.624Z" }, + { url = "https://files.pythonhosted.org/packages/94/0a/e39a113d4209da0dbbc9385608cdb1b0726a4d25f78672dc51c97cfea80f/coverage-7.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0ef4e66f006ed181df29b59921bd8fc7ed7cd6a9289295cd8b2824b49b570df", size = 247466, upload-time = "2025-08-23T14:40:52.362Z" }, + { url = "https://files.pythonhosted.org/packages/40/cb/aebb2d8c9e3533ee340bea19b71c5b76605a0268aa49808e26fe96ec0a07/coverage-7.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb7b0bbf7cc1d0453b843eca7b5fa017874735bef9bfdfa4121373d2cc885ed6", size = 248104, upload-time = "2025-08-23T14:40:54.064Z" }, + { url = "https://files.pythonhosted.org/packages/08/e6/26570d6ccce8ff5de912cbfd268e7f475f00597cb58da9991fa919c5e539/coverage-7.10.5-cp311-cp311-win32.whl", hash = "sha256:1d043a8a06987cc0c98516e57c4d3fc2c1591364831e9deb59c9e1b4937e8caf", size = 219327, upload-time = "2025-08-23T14:40:55.424Z" }, + { url = "https://files.pythonhosted.org/packages/79/79/5f48525e366e518b36e66167e3b6e5db6fd54f63982500c6a5abb9d3dfbd/coverage-7.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:fefafcca09c3ac56372ef64a40f5fe17c5592fab906e0fdffd09543f3012ba50", size = 220213, upload-time = "2025-08-23T14:40:56.724Z" }, + { url = "https://files.pythonhosted.org/packages/40/3c/9058128b7b0bf333130c320b1eb1ae485623014a21ee196d68f7737f8610/coverage-7.10.5-cp311-cp311-win_arm64.whl", hash = "sha256:7e78b767da8b5fc5b2faa69bb001edafcd6f3995b42a331c53ef9572c55ceb82", size = 218893, upload-time = "2025-08-23T14:40:58.011Z" }, + { url = "https://files.pythonhosted.org/packages/27/8e/40d75c7128f871ea0fd829d3e7e4a14460cad7c3826e3b472e6471ad05bd/coverage-7.10.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c2d05c7e73c60a4cecc7d9b60dbfd603b4ebc0adafaef371445b47d0f805c8a9", size = 217077, upload-time = "2025-08-23T14:40:59.329Z" }, + { url = "https://files.pythonhosted.org/packages/18/a8/f333f4cf3fb5477a7f727b4d603a2eb5c3c5611c7fe01329c2e13b23b678/coverage-7.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32ddaa3b2c509778ed5373b177eb2bf5662405493baeff52278a0b4f9415188b", size = 217310, upload-time = "2025-08-23T14:41:00.628Z" }, + { url = "https://files.pythonhosted.org/packages/ec/2c/fbecd8381e0a07d1547922be819b4543a901402f63930313a519b937c668/coverage-7.10.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dd382410039fe062097aa0292ab6335a3f1e7af7bba2ef8d27dcda484918f20c", size = 248802, upload-time = "2025-08-23T14:41:02.012Z" }, + { url = "https://files.pythonhosted.org/packages/3f/bc/1011da599b414fb6c9c0f34086736126f9ff71f841755786a6b87601b088/coverage-7.10.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7fa22800f3908df31cea6fb230f20ac49e343515d968cc3a42b30d5c3ebf9b5a", size = 251550, upload-time = "2025-08-23T14:41:03.438Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6f/b5c03c0c721c067d21bc697accc3642f3cef9f087dac429c918c37a37437/coverage-7.10.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f366a57ac81f5e12797136552f5b7502fa053c861a009b91b80ed51f2ce651c6", size = 252684, upload-time = "2025-08-23T14:41:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/f9/50/d474bc300ebcb6a38a1047d5c465a227605d6473e49b4e0d793102312bc5/coverage-7.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1dc8f1980a272ad4a6c84cba7981792344dad33bf5869361576b7aef42733a", size = 250602, upload-time = 
"2025-08-23T14:41:06.719Z" }, + { url = "https://files.pythonhosted.org/packages/4a/2d/548c8e04249cbba3aba6bd799efdd11eee3941b70253733f5d355d689559/coverage-7.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2285c04ee8676f7938b02b4936d9b9b672064daab3187c20f73a55f3d70e6b4a", size = 248724, upload-time = "2025-08-23T14:41:08.429Z" }, + { url = "https://files.pythonhosted.org/packages/e2/96/a7c3c0562266ac39dcad271d0eec8fc20ab576e3e2f64130a845ad2a557b/coverage-7.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c2492e4dd9daab63f5f56286f8a04c51323d237631eb98505d87e4c4ff19ec34", size = 250158, upload-time = "2025-08-23T14:41:09.749Z" }, + { url = "https://files.pythonhosted.org/packages/f3/75/74d4be58c70c42ef0b352d597b022baf12dbe2b43e7cb1525f56a0fb1d4b/coverage-7.10.5-cp312-cp312-win32.whl", hash = "sha256:38a9109c4ee8135d5df5505384fc2f20287a47ccbe0b3f04c53c9a1989c2bbaf", size = 219493, upload-time = "2025-08-23T14:41:11.095Z" }, + { url = "https://files.pythonhosted.org/packages/4f/08/364e6012d1d4d09d1e27437382967efed971d7613f94bca9add25f0c1f2b/coverage-7.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:6b87f1ad60b30bc3c43c66afa7db6b22a3109902e28c5094957626a0143a001f", size = 220302, upload-time = "2025-08-23T14:41:12.449Z" }, + { url = "https://files.pythonhosted.org/packages/db/d5/7c8a365e1f7355c58af4fe5faf3f90cc8e587590f5854808d17ccb4e7077/coverage-7.10.5-cp312-cp312-win_arm64.whl", hash = "sha256:672a6c1da5aea6c629819a0e1461e89d244f78d7b60c424ecf4f1f2556c041d8", size = 218936, upload-time = "2025-08-23T14:41:13.872Z" }, + { url = "https://files.pythonhosted.org/packages/9f/08/4166ecfb60ba011444f38a5a6107814b80c34c717bc7a23be0d22e92ca09/coverage-7.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c", size = 217106, upload-time = "2025-08-23T14:41:15.268Z" }, + { url = "https://files.pythonhosted.org/packages/25/d7/b71022408adbf040a680b8c64bf6ead3be37b553e5844f7465643979f7ca/coverage-7.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44", size = 217353, upload-time = "2025-08-23T14:41:16.656Z" }, + { url = "https://files.pythonhosted.org/packages/74/68/21e0d254dbf8972bb8dd95e3fe7038f4be037ff04ba47d6d1b12b37510ba/coverage-7.10.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc", size = 248350, upload-time = "2025-08-23T14:41:18.128Z" }, + { url = "https://files.pythonhosted.org/packages/90/65/28752c3a896566ec93e0219fc4f47ff71bd2b745f51554c93e8dcb659796/coverage-7.10.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869", size = 250955, upload-time = "2025-08-23T14:41:19.577Z" }, + { url = "https://files.pythonhosted.org/packages/a5/eb/ca6b7967f57f6fef31da8749ea20417790bb6723593c8cd98a987be20423/coverage-7.10.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f", size = 252230, upload-time = "2025-08-23T14:41:20.959Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/17a411b2a2a18f8b8c952aa01c00f9284a1fbc677c68a0003b772ea89104/coverage-7.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5", size = 250387, upload-time = 
"2025-08-23T14:41:22.644Z" }, + { url = "https://files.pythonhosted.org/packages/c7/89/97a9e271188c2fbb3db82235c33980bcbc733da7da6065afbaa1d685a169/coverage-7.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c", size = 248280, upload-time = "2025-08-23T14:41:24.061Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c6/0ad7d0137257553eb4706b4ad6180bec0a1b6a648b092c5bbda48d0e5b2c/coverage-7.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2", size = 249894, upload-time = "2025-08-23T14:41:26.165Z" }, + { url = "https://files.pythonhosted.org/packages/84/56/fb3aba936addb4c9e5ea14f5979393f1c2466b4c89d10591fd05f2d6b2aa/coverage-7.10.5-cp313-cp313-win32.whl", hash = "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4", size = 219536, upload-time = "2025-08-23T14:41:27.694Z" }, + { url = "https://files.pythonhosted.org/packages/fc/54/baacb8f2f74431e3b175a9a2881feaa8feb6e2f187a0e7e3046f3c7742b2/coverage-7.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b", size = 220330, upload-time = "2025-08-23T14:41:29.081Z" }, + { url = "https://files.pythonhosted.org/packages/64/8a/82a3788f8e31dee51d350835b23d480548ea8621f3effd7c3ba3f7e5c006/coverage-7.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84", size = 218961, upload-time = "2025-08-23T14:41:30.511Z" }, + { url = "https://files.pythonhosted.org/packages/d8/a1/590154e6eae07beee3b111cc1f907c30da6fc8ce0a83ef756c72f3c7c748/coverage-7.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7", size = 217819, upload-time = "2025-08-23T14:41:31.962Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ff/436ffa3cfc7741f0973c5c89405307fe39b78dcf201565b934e6616fc4ad/coverage-7.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b", size = 218040, upload-time = "2025-08-23T14:41:33.472Z" }, + { url = "https://files.pythonhosted.org/packages/a0/ca/5787fb3d7820e66273913affe8209c534ca11241eb34ee8c4fd2aaa9dd87/coverage-7.10.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae", size = 259374, upload-time = "2025-08-23T14:41:34.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/89/21af956843896adc2e64fc075eae3c1cadb97ee0a6960733e65e696f32dd/coverage-7.10.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760", size = 261551, upload-time = "2025-08-23T14:41:36.333Z" }, + { url = "https://files.pythonhosted.org/packages/e1/96/390a69244ab837e0ac137989277879a084c786cf036c3c4a3b9637d43a89/coverage-7.10.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235", size = 263776, upload-time = "2025-08-23T14:41:38.25Z" }, + { url = "https://files.pythonhosted.org/packages/00/32/cfd6ae1da0a521723349f3129b2455832fc27d3f8882c07e5b6fefdd0da2/coverage-7.10.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5", size = 261326, upload-time = 
"2025-08-23T14:41:40.343Z" }, + { url = "https://files.pythonhosted.org/packages/4c/c4/bf8d459fb4ce2201e9243ce6c015936ad283a668774430a3755f467b39d1/coverage-7.10.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db", size = 259090, upload-time = "2025-08-23T14:41:42.106Z" }, + { url = "https://files.pythonhosted.org/packages/f4/5d/a234f7409896468e5539d42234016045e4015e857488b0b5b5f3f3fa5f2b/coverage-7.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e", size = 260217, upload-time = "2025-08-23T14:41:43.591Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ad/87560f036099f46c2ddd235be6476dd5c1d6be6bb57569a9348d43eeecea/coverage-7.10.5-cp313-cp313t-win32.whl", hash = "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee", size = 220194, upload-time = "2025-08-23T14:41:45.051Z" }, + { url = "https://files.pythonhosted.org/packages/36/a8/04a482594fdd83dc677d4a6c7e2d62135fff5a1573059806b8383fad9071/coverage-7.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14", size = 221258, upload-time = "2025-08-23T14:41:46.44Z" }, + { url = "https://files.pythonhosted.org/packages/eb/ad/7da28594ab66fe2bc720f1bc9b131e62e9b4c6e39f044d9a48d18429cc21/coverage-7.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff", size = 219521, upload-time = "2025-08-23T14:41:47.882Z" }, + { url = "https://files.pythonhosted.org/packages/d3/7f/c8b6e4e664b8a95254c35a6c8dd0bf4db201ec681c169aae2f1256e05c85/coverage-7.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031", size = 217090, upload-time = "2025-08-23T14:41:49.327Z" }, + { url = "https://files.pythonhosted.org/packages/44/74/3ee14ede30a6e10a94a104d1d0522d5fb909a7c7cac2643d2a79891ff3b9/coverage-7.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3", size = 217365, upload-time = "2025-08-23T14:41:50.796Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/06ac21bf87dfb7620d1f870dfa3c2cae1186ccbcdc50b8b36e27a0d52f50/coverage-7.10.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031", size = 248413, upload-time = "2025-08-23T14:41:52.5Z" }, + { url = "https://files.pythonhosted.org/packages/21/bc/cc5bed6e985d3a14228539631573f3863be6a2587381e8bc5fdf786377a1/coverage-7.10.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2", size = 250943, upload-time = "2025-08-23T14:41:53.922Z" }, + { url = "https://files.pythonhosted.org/packages/8d/43/6a9fc323c2c75cd80b18d58db4a25dc8487f86dd9070f9592e43e3967363/coverage-7.10.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762", size = 252301, upload-time = "2025-08-23T14:41:56.528Z" }, + { url = "https://files.pythonhosted.org/packages/69/7c/3e791b8845f4cd515275743e3775adb86273576596dc9f02dca37357b4f2/coverage-7.10.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae", size = 250302, upload-time = 
"2025-08-23T14:41:58.171Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bc/5099c1e1cb0c9ac6491b281babea6ebbf999d949bf4aa8cdf4f2b53505e8/coverage-7.10.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262", size = 248237, upload-time = "2025-08-23T14:41:59.703Z" }, + { url = "https://files.pythonhosted.org/packages/7e/51/d346eb750a0b2f1e77f391498b753ea906fde69cc11e4b38dca28c10c88c/coverage-7.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99", size = 249726, upload-time = "2025-08-23T14:42:01.343Z" }, + { url = "https://files.pythonhosted.org/packages/a3/85/eebcaa0edafe427e93286b94f56ea7e1280f2c49da0a776a6f37e04481f9/coverage-7.10.5-cp314-cp314-win32.whl", hash = "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde", size = 219825, upload-time = "2025-08-23T14:42:03.263Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f7/6d43e037820742603f1e855feb23463979bf40bd27d0cde1f761dcc66a3e/coverage-7.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13", size = 220618, upload-time = "2025-08-23T14:42:05.037Z" }, + { url = "https://files.pythonhosted.org/packages/4a/b0/ed9432e41424c51509d1da603b0393404b828906236fb87e2c8482a93468/coverage-7.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9", size = 219199, upload-time = "2025-08-23T14:42:06.662Z" }, + { url = "https://files.pythonhosted.org/packages/2f/54/5a7ecfa77910f22b659c820f67c16fc1e149ed132ad7117f0364679a8fa9/coverage-7.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508", size = 217833, upload-time = "2025-08-23T14:42:08.262Z" }, + { url = "https://files.pythonhosted.org/packages/4e/0e/25672d917cc57857d40edf38f0b867fb9627115294e4f92c8fcbbc18598d/coverage-7.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357", size = 218048, upload-time = "2025-08-23T14:42:10.247Z" }, + { url = "https://files.pythonhosted.org/packages/cb/7c/0b2b4f1c6f71885d4d4b2b8608dcfc79057adb7da4143eb17d6260389e42/coverage-7.10.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b", size = 259549, upload-time = "2025-08-23T14:42:11.811Z" }, + { url = "https://files.pythonhosted.org/packages/94/73/abb8dab1609abec7308d83c6aec547944070526578ee6c833d2da9a0ad42/coverage-7.10.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4", size = 261715, upload-time = "2025-08-23T14:42:13.505Z" }, + { url = "https://files.pythonhosted.org/packages/0b/d1/abf31de21ec92731445606b8d5e6fa5144653c2788758fcf1f47adb7159a/coverage-7.10.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba", size = 263969, upload-time = "2025-08-23T14:42:15.422Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b3/ef274927f4ebede96056173b620db649cc9cb746c61ffc467946b9d0bc67/coverage-7.10.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842", size = 261408, upload-time = 
"2025-08-23T14:42:16.971Z" }, + { url = "https://files.pythonhosted.org/packages/20/fc/83ca2812be616d69b4cdd4e0c62a7bc526d56875e68fd0f79d47c7923584/coverage-7.10.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874", size = 259168, upload-time = "2025-08-23T14:42:18.512Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/e0779e5716f72d5c9962e709d09815d02b3b54724e38567308304c3fc9df/coverage-7.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732", size = 260317, upload-time = "2025-08-23T14:42:20.005Z" }, + { url = "https://files.pythonhosted.org/packages/2b/fe/4247e732f2234bb5eb9984a0888a70980d681f03cbf433ba7b48f08ca5d5/coverage-7.10.5-cp314-cp314t-win32.whl", hash = "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df", size = 220600, upload-time = "2025-08-23T14:42:22.027Z" }, + { url = "https://files.pythonhosted.org/packages/a7/a0/f294cff6d1034b87839987e5b6ac7385bec599c44d08e0857ac7f164ad0c/coverage-7.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f", size = 221714, upload-time = "2025-08-23T14:42:23.616Z" }, + { url = "https://files.pythonhosted.org/packages/23/18/fa1afdc60b5528d17416df440bcbd8fd12da12bfea9da5b6ae0f7a37d0f7/coverage-7.10.5-cp314-cp314t-win_arm64.whl", hash = "sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2", size = 219735, upload-time = "2025-08-23T14:42:25.156Z" }, + { url = "https://files.pythonhosted.org/packages/08/b6/fff6609354deba9aeec466e4bcaeb9d1ed3e5d60b14b57df2a36fb2273f2/coverage-7.10.5-py3-none-any.whl", hash = "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a", size = 208736, upload-time = "2025-08-23T14:42:43.145Z" }, ] [package.optional-dependencies] @@ -470,7 +483,7 @@ wheels = [ [[package]] name = "flask" -version = "3.1.1" +version = "3.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "blinker" }, @@ -480,9 +493,9 @@ dependencies = [ { name = "markupsafe" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/de/e47735752347f4128bcf354e0da07ef311a78244eba9e3dc1d4a5ab21a98/flask-3.1.1.tar.gz", hash = "sha256:284c7b8f2f58cb737f0cf1c30fd7eaf0ccfcde196099d24ecede3fc2005aa59e", size = 753440, upload-time = "2025-05-13T15:01:17.447Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/6d/cfe3c0fcc5e477df242b98bfe186a4c34357b4847e87ecaef04507332dab/flask-3.1.2.tar.gz", hash = "sha256:bf656c15c80190ed628ad08cdfd3aaa35beb087855e2f494910aa3774cc4fd87", size = 720160, upload-time = "2025-08-19T21:03:21.205Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/68/9d4508e893976286d2ead7f8f571314af6c2037af34853a30fd769c02e9d/flask-3.1.1-py3-none-any.whl", hash = "sha256:07aae2bb5eaf77993ef57e357491839f5fd9f4dc281593a81a9e4d79a24f295c", size = 103305, upload-time = "2025-05-13T15:01:15.591Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f9/7f9263c5695f4bd0023734af91bedb2ff8209e8de6ead162f35d8dc762fd/flask-3.1.2-py3-none-any.whl", hash = "sha256:ca1d8112ec8a6158cc29ea4858963350011b5c846a414cdb7a954aa9e967d03c", size = 103308, upload-time = "2025-08-19T21:03:19.499Z" }, ] [[package]] @@ -499,14 +512,62 @@ wheels = [ [[package]] name = "griffe" -version = "1.11.1" +version = "1.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/18/0f/9cbd56eb047de77a4b93d8d4674e70cd19a1ff64d7410651b514a1ed93d5/griffe-1.11.1.tar.gz", hash = "sha256:d54ffad1ec4da9658901eb5521e9cddcdb7a496604f67d8ae71077f03f549b7e", size = 410996, upload-time = "2025-08-11T11:38:35.528Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/a3/451ffd422ce143758a39c0290aaa7c9727ecc2bcc19debd7a8f3c6075ce9/griffe-1.11.1-py3-none-any.whl", hash = "sha256:5799cf7c513e4b928cfc6107ee6c4bc4a92e001f07022d97fd8dee2f612b6064", size = 138745, upload-time = "2025-08-11T11:38:33.964Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/c6/b5/23b91f22b7b3a7f8f62223f6664946271c0f5cb4179605a3e6bbae863920/griffe-1.13.0.tar.gz", hash = "sha256:246ea436a5e78f7fbf5f24ca8a727bb4d2a4b442a2959052eea3d0bfe9a076e0", size = 412759, upload-time = "2025-08-26T13:27:11.422Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/8c/b7cfdd8dfe48f6b09f7353323732e1a290c388bd14f216947928dc85f904/griffe-1.13.0-py3-none-any.whl", hash = "sha256:470fde5b735625ac0a36296cd194617f039e9e83e301fcbd493e2b58382d0559", size = 139365, upload-time = "2025-08-26T13:27:09.882Z" }, +] + +[[package]] +name = "grpcio" +version = "1.74.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048, upload-time = "2025-07-24T18:54:23.039Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/54/68e51a90797ad7afc5b0a7881426c337f6a9168ebab73c3210b76aa7c90d/grpcio-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:85bd5cdf4ed7b2d6438871adf6afff9af7096486fcf51818a81b77ef4dd30907", size = 5481935, upload-time = "2025-07-24T18:52:43.756Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/af817c7e9843929e93e54d09c9aee2555c2e8d81b93102a9426b36e91833/grpcio-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:68c8ebcca945efff9d86d8d6d7bfb0841cf0071024417e2d7f45c5e46b5b08eb", size = 10986796, upload-time = "2025-07-24T18:52:47.219Z" }, + { url = "https://files.pythonhosted.org/packages/d5/94/d67756638d7bb07750b07d0826c68e414124574b53840ba1ff777abcd388/grpcio-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:e154d230dc1bbbd78ad2fdc3039fa50ad7ffcf438e4eb2fa30bce223a70c7486", size = 5983663, upload-time = "2025-07-24T18:52:49.463Z" }, + { url = "https://files.pythonhosted.org/packages/35/f5/c5e4853bf42148fea8532d49e919426585b73eafcf379a712934652a8de9/grpcio-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8978003816c7b9eabe217f88c78bc26adc8f9304bf6a594b02e5a49b2ef9c11", size = 6653765, upload-time = "2025-07-24T18:52:51.094Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/a1991dd64b331d199935e096cc9daa3415ee5ccbe9f909aa48eded7bba34/grpcio-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3d7bd6e3929fd2ea7fbc3f562e4987229ead70c9ae5f01501a46701e08f1ad9", size = 6215172, upload-time = "2025-07-24T18:52:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/01/a4/7cef3dbb3b073d0ce34fd507efc44ac4c9442a0ef9fba4fb3f5c551efef5/grpcio-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:136b53c91ac1d02c8c24201bfdeb56f8b3ac3278668cbb8e0ba49c88069e1bdc", size = 6329142, upload-time = "2025-07-24T18:52:54.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/d3/587920f882b46e835ad96014087054655312400e2f1f1446419e5179a383/grpcio-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe0f540750a13fd8e5da4b3eaba91a785eea8dca5ccd2bc2ffe978caa403090e", size = 7018632, upload-time = "2025-07-24T18:52:56.523Z" }, + { url = "https://files.pythonhosted.org/packages/1f/95/c70a3b15a0bc83334b507e3d2ae20ee8fa38d419b8758a4d838f5c2a7d32/grpcio-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4e4181bfc24413d1e3a37a0b7889bea68d973d4b45dd2bc68bb766c140718f82", size = 6509641, upload-time = "2025-07-24T18:52:58.495Z" }, + { url = "https://files.pythonhosted.org/packages/4b/06/2e7042d06247d668ae69ea6998eca33f475fd4e2855f94dcb2aa5daef334/grpcio-1.74.0-cp310-cp310-win32.whl", hash = "sha256:1733969040989f7acc3d94c22f55b4a9501a30f6aaacdbccfaba0a3ffb255ab7", size = 3817478, upload-time = "2025-07-24T18:53:00.128Z" }, + { url = "https://files.pythonhosted.org/packages/93/20/e02b9dcca3ee91124060b65bbf5b8e1af80b3b76a30f694b44b964ab4d71/grpcio-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e912d3c993a29df6c627459af58975b2e5c897d93287939b9d5065f000249b5", size = 4493971, upload-time = "2025-07-24T18:53:02.068Z" }, + { url = "https://files.pythonhosted.org/packages/e7/77/b2f06db9f240a5abeddd23a0e49eae2b6ac54d85f0e5267784ce02269c3b/grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31", size = 5487368, upload-time = "2025-07-24T18:53:03.548Z" }, + { url = "https://files.pythonhosted.org/packages/48/99/0ac8678a819c28d9a370a663007581744a9f2a844e32f0fa95e1ddda5b9e/grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4", size = 10999804, upload-time = "2025-07-24T18:53:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/45/c6/a2d586300d9e14ad72e8dc211c7aecb45fe9846a51e558c5bca0c9102c7f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce", size = 5987667, upload-time = "2025-07-24T18:53:07.157Z" }, + { url = "https://files.pythonhosted.org/packages/c9/57/5f338bf56a7f22584e68d669632e521f0de460bb3749d54533fc3d0fca4f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3", size = 6655612, upload-time = "2025-07-24T18:53:09.244Z" }, + { url = "https://files.pythonhosted.org/packages/82/ea/a4820c4c44c8b35b1903a6c72a5bdccec92d0840cf5c858c498c66786ba5/grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182", size = 6219544, upload-time = "2025-07-24T18:53:11.221Z" }, + { url = "https://files.pythonhosted.org/packages/a4/17/0537630a921365928f5abb6d14c79ba4dcb3e662e0dbeede8af4138d9dcf/grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d", size = 6334863, upload-time = "2025-07-24T18:53:12.925Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a6/85ca6cb9af3f13e1320d0a806658dca432ff88149d5972df1f7b51e87127/grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f", size = 7019320, upload-time = "2025-07-24T18:53:15.002Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/a7/fe2beab970a1e25d2eff108b3cf4f7d9a53c185106377a3d1989216eba45/grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4", size = 6514228, upload-time = "2025-07-24T18:53:16.999Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c2/2f9c945c8a248cebc3ccda1b7a1bf1775b9d7d59e444dbb18c0014e23da6/grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b", size = 3817216, upload-time = "2025-07-24T18:53:20.564Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d1/a9cf9c94b55becda2199299a12b9feef0c79946b0d9d34c989de6d12d05d/grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11", size = 4495380, upload-time = "2025-07-24T18:53:22.058Z" }, + { url = "https://files.pythonhosted.org/packages/4c/5d/e504d5d5c4469823504f65687d6c8fb97b7f7bf0b34873b7598f1df24630/grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8", size = 5445551, upload-time = "2025-07-24T18:53:23.641Z" }, + { url = "https://files.pythonhosted.org/packages/43/01/730e37056f96f2f6ce9f17999af1556df62ee8dab7fa48bceeaab5fd3008/grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6", size = 10979810, upload-time = "2025-07-24T18:53:25.349Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/09fd100473ea5c47083889ca47ffd356576173ec134312f6aa0e13111dee/grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5", size = 5941946, upload-time = "2025-07-24T18:53:27.387Z" }, + { url = "https://files.pythonhosted.org/packages/8a/99/12d2cca0a63c874c6d3d195629dcd85cdf5d6f98a30d8db44271f8a97b93/grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49", size = 6621763, upload-time = "2025-07-24T18:53:29.193Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2c/930b0e7a2f1029bbc193443c7bc4dc2a46fedb0203c8793dcd97081f1520/grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7", size = 6180664, upload-time = "2025-07-24T18:53:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/db/d5/ff8a2442180ad0867717e670f5ec42bfd8d38b92158ad6bcd864e6d4b1ed/grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3", size = 6301083, upload-time = "2025-07-24T18:53:32.454Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ba/b361d390451a37ca118e4ec7dccec690422e05bc85fba2ec72b06cefec9f/grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707", size = 6994132, upload-time = "2025-07-24T18:53:34.506Z" }, + { url = "https://files.pythonhosted.org/packages/3b/0c/3a5fa47d2437a44ced74141795ac0251bbddeae74bf81df3447edd767d27/grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b", size = 6489616, upload-time = "2025-07-24T18:53:36.217Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/95/ab64703b436d99dc5217228babc76047d60e9ad14df129e307b5fec81fd0/grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c", size = 3807083, upload-time = "2025-07-24T18:53:37.911Z" }, + { url = "https://files.pythonhosted.org/packages/84/59/900aa2445891fc47a33f7d2f76e00ca5d6ae6584b20d19af9c06fa09bf9a/grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc", size = 4490123, upload-time = "2025-07-24T18:53:39.528Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d8/1004a5f468715221450e66b051c839c2ce9a985aa3ee427422061fcbb6aa/grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89", size = 5449488, upload-time = "2025-07-24T18:53:41.174Z" }, + { url = "https://files.pythonhosted.org/packages/94/0e/33731a03f63740d7743dced423846c831d8e6da808fcd02821a4416df7fa/grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01", size = 10974059, upload-time = "2025-07-24T18:53:43.066Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c6/3d2c14d87771a421205bdca991467cfe473ee4c6a1231c1ede5248c62ab8/grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e", size = 5945647, upload-time = "2025-07-24T18:53:45.269Z" }, + { url = "https://files.pythonhosted.org/packages/c5/83/5a354c8aaff58594eef7fffebae41a0f8995a6258bbc6809b800c33d4c13/grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91", size = 6626101, upload-time = "2025-07-24T18:53:47.015Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ca/4fdc7bf59bf6994aa45cbd4ef1055cd65e2884de6113dbd49f75498ddb08/grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249", size = 6182562, upload-time = "2025-07-24T18:53:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/fd/48/2869e5b2c1922583686f7ae674937986807c2f676d08be70d0a541316270/grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362", size = 6303425, upload-time = "2025-07-24T18:53:50.847Z" }, + { url = "https://files.pythonhosted.org/packages/a6/0e/bac93147b9a164f759497bc6913e74af1cb632c733c7af62c0336782bd38/grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f", size = 6996533, upload-time = "2025-07-24T18:53:52.747Z" }, + { url = "https://files.pythonhosted.org/packages/84/35/9f6b2503c1fd86d068b46818bbd7329db26a87cdd8c01e0d1a9abea1104c/grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20", size = 6491489, upload-time = "2025-07-24T18:53:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/75/33/a04e99be2a82c4cbc4039eb3a76f6c3632932b9d5d295221389d10ac9ca7/grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa", size = 3805811, upload-time = "2025-07-24T18:53:56.798Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/80/de3eb55eb581815342d097214bed4c59e806b05f1b3110df03b2280d6dfd/grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24", size = 4489214, upload-time = "2025-07-24T18:53:59.771Z" }, ] [[package]] @@ -520,15 +581,15 @@ wheels = [ [[package]] name = "h2" -version = "4.2.0" +version = "4.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "hpack" }, { name = "hyperframe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f", size = 2150682, upload-time = "2025-02-02T07:43:51.815Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957, upload-time = "2025-02-01T11:02:26.481Z" }, + { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" }, ] [[package]] @@ -753,16 +814,16 @@ wheels = [ [[package]] name = "mkdocs-autorefs" -version = "1.4.2" +version = "1.4.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown" }, { name = "markupsafe" }, { name = "mkdocs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/47/0c/c9826f35b99c67fa3a7cddfa094c1a6c43fafde558c309c6e4403e5b37dc/mkdocs_autorefs-1.4.2.tar.gz", hash = "sha256:e2ebe1abd2b67d597ed19378c0fff84d73d1dbce411fce7a7cc6f161888b6749", size = 54961, upload-time = "2025-05-20T13:09:09.886Z" } +sdist = { url = "https://files.pythonhosted.org/packages/51/fa/9124cd63d822e2bcbea1450ae68cdc3faf3655c69b455f3a7ed36ce6c628/mkdocs_autorefs-1.4.3.tar.gz", hash = "sha256:beee715b254455c4aa93b6ef3c67579c399ca092259cc41b7d9342573ff1fc75", size = 55425, upload-time = "2025-08-26T14:23:17.223Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/dc/fc063b78f4b769d1956319351704e23ebeba1e9e1d6a41b4b602325fd7e4/mkdocs_autorefs-1.4.2-py3-none-any.whl", hash = "sha256:83d6d777b66ec3c372a1aad4ae0cf77c243ba5bcda5bf0c6b8a2c5e7a3d89f13", size = 24969, upload-time = "2025-05-20T13:09:08.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/4d/7123b6fa2278000688ebd338e2a06d16870aaf9eceae6ba047ea05f92df1/mkdocs_autorefs-1.4.3-py3-none-any.whl", hash = "sha256:469d85eb3114801d08e9cc55d102b3ba65917a869b893403b8987b601cf55dc9", size = 25034, upload-time = "2025-08-26T14:23:15.906Z" }, ] [[package]] @@ -781,11 +842,12 @@ wheels = [ [[package]] name = "mkdocs-material" -version = "9.6.16" +version = "9.6.18" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "babel" }, { name = "backrefs" }, + { name = "click" }, { name = "colorama" }, { name = "jinja2" }, { name = "markdown" }, @@ -796,9 +858,9 @@ dependencies = [ { name = "pymdown-extensions" }, { name = "requests" }, ] -sdist = { 
url = "https://files.pythonhosted.org/packages/dd/84/aec27a468c5e8c27689c71b516fb5a0d10b8fca45b9ad2dd9d6e43bc4296/mkdocs_material-9.6.16.tar.gz", hash = "sha256:d07011df4a5c02ee0877496d9f1bfc986cfb93d964799b032dd99fe34c0e9d19", size = 4028828, upload-time = "2025-07-26T15:53:47.542Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/46/db0d78add5aac29dfcd0a593bcc6049c86c77ba8a25b3a5b681c190d5e99/mkdocs_material-9.6.18.tar.gz", hash = "sha256:a2eb253bcc8b66f8c6eaf8379c10ed6e9644090c2e2e9d0971c7722dc7211c05", size = 4034856, upload-time = "2025-08-22T08:21:47.575Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/65/f4/90ad67125b4dd66e7884e4dbdfab82e3679eb92b751116f8bb25ccfe2f0c/mkdocs_material-9.6.16-py3-none-any.whl", hash = "sha256:8d1a1282b892fe1fdf77bfeb08c485ba3909dd743c9ba69a19a40f637c6ec18c", size = 9223743, upload-time = "2025-07-26T15:53:44.236Z" }, + { url = "https://files.pythonhosted.org/packages/22/0b/545a4f8d4f9057e77f1d99640eb09aaae40c4f9034707f25636caf716ff9/mkdocs_material-9.6.18-py3-none-any.whl", hash = "sha256:dbc1e146a0ecce951a4d84f97b816a54936cdc9e1edd1667fc6868878ac06701", size = 9232642, upload-time = "2025-08-22T08:21:44.52Z" }, ] [[package]] @@ -834,7 +896,7 @@ python = [ [[package]] name = "mkdocstrings-python" -version = "1.16.12" +version = "1.18.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "griffe" }, @@ -842,9 +904,9 @@ dependencies = [ { name = "mkdocstrings" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/ed/b886f8c714fd7cccc39b79646b627dbea84cd95c46be43459ef46852caf0/mkdocstrings_python-1.16.12.tar.gz", hash = "sha256:9b9eaa066e0024342d433e332a41095c4e429937024945fea511afe58f63175d", size = 206065, upload-time = "2025-06-03T12:52:49.276Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/f2/fdbdd0da3877906a062ce397b56f97614b0fce5f168e20ccb183f23834c0/mkdocstrings_python-1.18.1.tar.gz", hash = "sha256:89e02225a6d2e238337cad35cf25e4ed4df70862feb1ecc967d7d4e2c1737364", size = 207760, upload-time = "2025-08-28T10:46:08.04Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/dd/a24ee3de56954bfafb6ede7cd63c2413bb842cc48eb45e41c43a05a33074/mkdocstrings_python-1.16.12-py3-none-any.whl", hash = "sha256:22ded3a63b3d823d57457a70ff9860d5a4de9e8b1e482876fc9baabaf6f5f374", size = 124287, upload-time = "2025-06-03T12:52:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/b6/de/d6c16289279328c1bb930b28ceea367f57b276a3b19a6c7e9fda94e40a47/mkdocstrings_python-1.18.1-py3-none-any.whl", hash = "sha256:0a96a084e7b0ef36db190a222a4545efe13e8664382af44f9d4bf39d0d0218a2", size = 138200, upload-time = "2025-08-28T10:46:06.636Z" }, ] [[package]] @@ -908,11 +970,11 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.3.8" +version = "4.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, ] [[package]] @@ -971,15 +1033,15 @@ wheels = [ [[package]] name = "pyright" -version = "1.1.403" +version = "1.1.404" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodeenv" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/f6/35f885264ff08c960b23d1542038d8da86971c5d8c955cfab195a4f672d7/pyright-1.1.403.tar.gz", hash = "sha256:3ab69b9f41c67fb5bbb4d7a36243256f0d549ed3608678d381d5f51863921104", size = 3913526, upload-time = "2025-07-09T07:15:52.882Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e2/6e/026be64c43af681d5632722acd100b06d3d39f383ec382ff50a71a6d5bce/pyright-1.1.404.tar.gz", hash = "sha256:455e881a558ca6be9ecca0b30ce08aa78343ecc031d37a198ffa9a7a1abeb63e", size = 4065679, upload-time = "2025-08-20T18:46:14.029Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/49/b6/b04e5c2f41a5ccad74a1a4759da41adb20b4bc9d59a5e08d29ba60084d07/pyright-1.1.403-py3-none-any.whl", hash = "sha256:c0eeca5aa76cbef3fcc271259bbd785753c7ad7bcac99a9162b4c4c7daed23b3", size = 5684504, upload-time = "2025-07-09T07:15:50.958Z" }, + { url = "https://files.pythonhosted.org/packages/84/30/89aa7f7d7a875bbb9a577d4b1dc5a3e404e3d2ae2657354808e905e358e0/pyright-1.1.404-py3-none-any.whl", hash = "sha256:c7b7ff1fdb7219c643079e4c3e7d4125f0dafcc19d253b47e898d130ea426419", size = 5902951, upload-time = "2025-08-20T18:46:12.096Z" }, ] [package.optional-dependencies] @@ -1102,7 +1164,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.4" +version = "2.32.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -1110,34 +1172,35 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] [[package]] name = "ruff" -version = "0.12.7" -source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/81/0bd3594fa0f690466e41bd033bdcdf86cba8288345ac77ad4afbe5ec743a/ruff-0.12.7.tar.gz", hash = "sha256:1fc3193f238bc2d7968772c82831a4ff69252f673be371fb49663f0068b7ec71", size = 5197814, upload-time = "2025-07-29T22:32:35.877Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/d2/6cb35e9c85e7a91e8d22ab32ae07ac39cc34a71f1009a6f9e4a2a019e602/ruff-0.12.7-py3-none-linux_armv6l.whl", hash = "sha256:76e4f31529899b8c434c3c1dede98c4483b89590e15fb49f2d46183801565303", size = 11852189, upload-time = "2025-07-29T22:31:41.281Z" }, - { url = "https://files.pythonhosted.org/packages/63/5b/a4136b9921aa84638f1a6be7fb086f8cad0fde538ba76bda3682f2599a2f/ruff-0.12.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:789b7a03e72507c54fb3ba6209e4bb36517b90f1a3569ea17084e3fd295500fb", size = 12519389, upload-time = "2025-07-29T22:31:54.265Z" }, - { url = "https://files.pythonhosted.org/packages/a8/c9/3e24a8472484269b6b1821794141f879c54645a111ded4b6f58f9ab0705f/ruff-0.12.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e1c2a3b8626339bb6369116e7030a4cf194ea48f49b64bb505732a7fce4f4e3", size = 11743384, upload-time = "2025-07-29T22:31:59.575Z" }, - { url = "https://files.pythonhosted.org/packages/26/7c/458dd25deeb3452c43eaee853c0b17a1e84169f8021a26d500ead77964fd/ruff-0.12.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dec41817623d388e645612ec70d5757a6d9c035f3744a52c7b195a57e03860", size = 11943759, upload-time = "2025-07-29T22:32:01.95Z" }, - { url = "https://files.pythonhosted.org/packages/7f/8b/658798472ef260ca050e400ab96ef7e85c366c39cf3dfbef4d0a46a528b6/ruff-0.12.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47ef751f722053a5df5fa48d412dbb54d41ab9b17875c6840a58ec63ff0c247c", size = 11654028, upload-time = "2025-07-29T22:32:04.367Z" }, - { url = "https://files.pythonhosted.org/packages/a8/86/9c2336f13b2a3326d06d39178fd3448dcc7025f82514d1b15816fe42bfe8/ruff-0.12.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a828a5fc25a3efd3e1ff7b241fd392686c9386f20e5ac90aa9234a5faa12c423", size = 13225209, upload-time = "2025-07-29T22:32:06.952Z" }, - { url = "https://files.pythonhosted.org/packages/76/69/df73f65f53d6c463b19b6b312fd2391dc36425d926ec237a7ed028a90fc1/ruff-0.12.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5726f59b171111fa6a69d82aef48f00b56598b03a22f0f4170664ff4d8298efb", size = 14182353, upload-time = "2025-07-29T22:32:10.053Z" }, - { url = "https://files.pythonhosted.org/packages/58/1e/de6cda406d99fea84b66811c189b5ea139814b98125b052424b55d28a41c/ruff-0.12.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74e6f5c04c4dd4aba223f4fe6e7104f79e0eebf7d307e4f9b18c18362124bccd", size = 13631555, upload-time = "2025-07-29T22:32:12.644Z" }, - { url = "https://files.pythonhosted.org/packages/6f/ae/625d46d5164a6cc9261945a5e89df24457dc8262539ace3ac36c40f0b51e/ruff-0.12.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0bfe4e77fba61bf2ccadf8cf005d6133e3ce08793bbe870dd1c734f2699a3e", size = 12667556, upload-time = "2025-07-29T22:32:15.312Z" }, - { url = "https://files.pythonhosted.org/packages/55/bf/9cb1ea5e3066779e42ade8d0cd3d3b0582a5720a814ae1586f85014656b6/ruff-0.12.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bfb01e1623bf7f59ea749a841da56f8f653d641bfd046edee32ede7ff6c606", size = 12939784, upload-time = 
"2025-07-29T22:32:17.69Z" }, - { url = "https://files.pythonhosted.org/packages/55/7f/7ead2663be5627c04be83754c4f3096603bf5e99ed856c7cd29618c691bd/ruff-0.12.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41df94a957d50083fd09b916d6e89e497246698c3f3d5c681c8b3e7b9bb4ac8", size = 11771356, upload-time = "2025-07-29T22:32:20.134Z" }, - { url = "https://files.pythonhosted.org/packages/17/40/a95352ea16edf78cd3a938085dccc55df692a4d8ba1b3af7accbe2c806b0/ruff-0.12.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4000623300563c709458d0ce170c3d0d788c23a058912f28bbadc6f905d67afa", size = 11612124, upload-time = "2025-07-29T22:32:22.645Z" }, - { url = "https://files.pythonhosted.org/packages/4d/74/633b04871c669e23b8917877e812376827c06df866e1677f15abfadc95cb/ruff-0.12.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:69ffe0e5f9b2cf2b8e289a3f8945b402a1b19eff24ec389f45f23c42a3dd6fb5", size = 12479945, upload-time = "2025-07-29T22:32:24.765Z" }, - { url = "https://files.pythonhosted.org/packages/be/34/c3ef2d7799c9778b835a76189c6f53c179d3bdebc8c65288c29032e03613/ruff-0.12.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a07a5c8ffa2611a52732bdc67bf88e243abd84fe2d7f6daef3826b59abbfeda4", size = 12998677, upload-time = "2025-07-29T22:32:27.022Z" }, - { url = "https://files.pythonhosted.org/packages/77/ab/aca2e756ad7b09b3d662a41773f3edcbd262872a4fc81f920dc1ffa44541/ruff-0.12.7-py3-none-win32.whl", hash = "sha256:c928f1b2ec59fb77dfdf70e0419408898b63998789cc98197e15f560b9e77f77", size = 11756687, upload-time = "2025-07-29T22:32:29.381Z" }, - { url = "https://files.pythonhosted.org/packages/b4/71/26d45a5042bc71db22ddd8252ca9d01e9ca454f230e2996bb04f16d72799/ruff-0.12.7-py3-none-win_amd64.whl", hash = "sha256:9c18f3d707ee9edf89da76131956aba1270c6348bfee8f6c647de841eac7194f", size = 12912365, upload-time = "2025-07-29T22:32:31.517Z" }, - { url = "https://files.pythonhosted.org/packages/4c/9b/0b8aa09817b63e78d94b4977f18b1fcaead3165a5ee49251c5d5c245bb2d/ruff-0.12.7-py3-none-win_arm64.whl", hash = "sha256:dfce05101dbd11833a0776716d5d1578641b7fddb537fe7fa956ab85d1769b69", size = 11982083, upload-time = "2025-07-29T22:32:33.881Z" }, +version = "0.12.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/de/55/16ab6a7d88d93001e1ae4c34cbdcfb376652d761799459ff27c1dc20f6fa/ruff-0.12.11.tar.gz", hash = "sha256:c6b09ae8426a65bbee5425b9d0b82796dbb07cb1af045743c79bfb163001165d", size = 5347103, upload-time = "2025-08-28T13:59:08.87Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/a2/3b3573e474de39a7a475f3fbaf36a25600bfeb238e1a90392799163b64a0/ruff-0.12.11-py3-none-linux_armv6l.whl", hash = "sha256:93fce71e1cac3a8bf9200e63a38ac5c078f3b6baebffb74ba5274fb2ab276065", size = 11979885, upload-time = "2025-08-28T13:58:26.654Z" }, + { url = "https://files.pythonhosted.org/packages/76/e4/235ad6d1785a2012d3ded2350fd9bc5c5af8c6f56820e696b0118dfe7d24/ruff-0.12.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8e33ac7b28c772440afa80cebb972ffd823621ded90404f29e5ab6d1e2d4b93", size = 12742364, upload-time = "2025-08-28T13:58:30.256Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0d/15b72c5fe6b1e402a543aa9d8960e0a7e19dfb079f5b0b424db48b7febab/ruff-0.12.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d69fb9d4937aa19adb2e9f058bc4fbfe986c2040acb1a4a9747734834eaa0bfd", size = 11920111, upload-time = "2025-08-28T13:58:33.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/c0/f66339d7893798ad3e17fa5a1e587d6fd9806f7c1c062b63f8b09dda6702/ruff-0.12.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:411954eca8464595077a93e580e2918d0a01a19317af0a72132283e28ae21bee", size = 12160060, upload-time = "2025-08-28T13:58:35.74Z" }, + { url = "https://files.pythonhosted.org/packages/03/69/9870368326db26f20c946205fb2d0008988aea552dbaec35fbacbb46efaa/ruff-0.12.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a2c0a2e1a450f387bf2c6237c727dd22191ae8c00e448e0672d624b2bbd7fb0", size = 11799848, upload-time = "2025-08-28T13:58:38.051Z" }, + { url = "https://files.pythonhosted.org/packages/25/8c/dd2c7f990e9b3a8a55eee09d4e675027d31727ce33cdb29eab32d025bdc9/ruff-0.12.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ca4c3a7f937725fd2413c0e884b5248a19369ab9bdd850b5781348ba283f644", size = 13536288, upload-time = "2025-08-28T13:58:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/7a/30/d5496fa09aba59b5e01ea76775a4c8897b13055884f56f1c35a4194c2297/ruff-0.12.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4d1df0098124006f6a66ecf3581a7f7e754c4df7644b2e6704cd7ca80ff95211", size = 14490633, upload-time = "2025-08-28T13:58:42.285Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2f/81f998180ad53445d403c386549d6946d0748e536d58fce5b5e173511183/ruff-0.12.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a8dd5f230efc99a24ace3b77e3555d3fbc0343aeed3fc84c8d89e75ab2ff793", size = 13888430, upload-time = "2025-08-28T13:58:44.641Z" }, + { url = "https://files.pythonhosted.org/packages/87/71/23a0d1d5892a377478c61dbbcffe82a3476b050f38b5162171942a029ef3/ruff-0.12.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4dc75533039d0ed04cd33fb8ca9ac9620b99672fe7ff1533b6402206901c34ee", size = 12913133, upload-time = "2025-08-28T13:58:47.039Z" }, + { url = "https://files.pythonhosted.org/packages/80/22/3c6cef96627f89b344c933781ed38329bfb87737aa438f15da95907cbfd5/ruff-0.12.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fc58f9266d62c6eccc75261a665f26b4ef64840887fc6cbc552ce5b29f96cc8", size = 13169082, upload-time = "2025-08-28T13:58:49.157Z" }, + { url = "https://files.pythonhosted.org/packages/05/b5/68b3ff96160d8b49e8dd10785ff3186be18fd650d356036a3770386e6c7f/ruff-0.12.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5a0113bd6eafd545146440225fe60b4e9489f59eb5f5f107acd715ba5f0b3d2f", size = 13139490, upload-time = "2025-08-28T13:58:51.593Z" }, + { url = "https://files.pythonhosted.org/packages/59/b9/050a3278ecd558f74f7ee016fbdf10591d50119df8d5f5da45a22c6afafc/ruff-0.12.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0d737b4059d66295c3ea5720e6efc152623bb83fde5444209b69cd33a53e2000", size = 11958928, upload-time = "2025-08-28T13:58:53.943Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bc/93be37347db854806904a43b0493af8d6873472dfb4b4b8cbb27786eb651/ruff-0.12.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:916fc5defee32dbc1fc1650b576a8fed68f5e8256e2180d4d9855aea43d6aab2", size = 11764513, upload-time = "2025-08-28T13:58:55.976Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a1/1471751e2015a81fd8e166cd311456c11df74c7e8769d4aabfbc7584c7ac/ruff-0.12.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c984f07d7adb42d3ded5be894fb4007f30f82c87559438b4879fe7aa08c62b39", size = 12745154, upload-time = "2025-08-28T13:58:58.16Z" }, + { 
url = "https://files.pythonhosted.org/packages/68/ab/2542b14890d0f4872dd81b7b2a6aed3ac1786fae1ce9b17e11e6df9e31e3/ruff-0.12.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e07fbb89f2e9249f219d88331c833860489b49cdf4b032b8e4432e9b13e8a4b9", size = 13227653, upload-time = "2025-08-28T13:59:00.276Z" }, + { url = "https://files.pythonhosted.org/packages/22/16/2fbfc61047dbfd009c58a28369a693a1484ad15441723be1cd7fe69bb679/ruff-0.12.11-py3-none-win32.whl", hash = "sha256:c792e8f597c9c756e9bcd4d87cf407a00b60af77078c96f7b6366ea2ce9ba9d3", size = 11944270, upload-time = "2025-08-28T13:59:02.347Z" }, + { url = "https://files.pythonhosted.org/packages/08/a5/34276984705bfe069cd383101c45077ee029c3fe3b28225bf67aa35f0647/ruff-0.12.11-py3-none-win_amd64.whl", hash = "sha256:a3283325960307915b6deb3576b96919ee89432ebd9c48771ca12ee8afe4a0fd", size = 13046600, upload-time = "2025-08-28T13:59:04.751Z" }, + { url = "https://files.pythonhosted.org/packages/84/a8/001d4a7c2b37623a3fd7463208267fb906df40ff31db496157549cfd6e72/ruff-0.12.11-py3-none-win_arm64.whl", hash = "sha256:bae4d6e6a2676f8fb0f98b74594a048bae1b944aab17e9f5d504062303c6dbea", size = 12135290, upload-time = "2025-08-28T13:59:06.933Z" }, ] [[package]] @@ -1160,15 +1223,15 @@ wheels = [ [[package]] name = "starlette" -version = "0.47.2" +version = "0.47.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/57/d062573f391d062710d4088fa1369428c38d51460ab6fedff920efef932e/starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8", size = 2583948, upload-time = "2025-07-20T17:31:58.522Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/b9/cc3017f9a9c9b6e27c5106cc10cc7904653c3eec0729793aec10479dd669/starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9", size = 2584144, upload-time = "2025-08-24T13:36:42.122Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/1f/b876b1f83aef204198a42dc101613fefccb32258e5428b5f9259677864b4/starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b", size = 72984, upload-time = "2025-07-20T17:31:56.738Z" }, + { url = "https://files.pythonhosted.org/packages/ce/fd/901cfa59aaa5b30a99e16876f11abe38b59a1a2c51ffb3d7142bb6089069/starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51", size = 72991, upload-time = "2025-08-24T13:36:40.887Z" }, ] [[package]] @@ -1223,13 +1286,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, ] +[[package]] +name = "types-grpcio" +version = "1.0.0.20250703" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/59/17/c80b7231e337993b808aef32bcc556aad60810ca51eeb7acd1204eefe518/types_grpcio-1.0.0.20250703.tar.gz", hash = "sha256:baf100184e5353cb60f045fb4fd47f37a360bedf0f19581535e4c3a3a1f7912b", size = 14552, upload-time = "2025-07-03T03:14:01.923Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b9/e2/9c682ba59a7f9cf477bc7954c25ce1c20f881693651f354d15df577996c0/types_grpcio-1.0.0.20250703-py3-none-any.whl", hash = "sha256:78d1bfc33b58a56697ef99e666e34be4c6887631341c75fdd28d58587aef5d9f", size = 15274, upload-time = "2025-07-03T03:14:00.907Z" }, +] + [[package]] name = "typing-extensions" -version = "4.14.1" +version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]]