diff --git a/README.md b/README.md index 6e42a2019..ca8a000f6 100644 --- a/README.md +++ b/README.md @@ -56,6 +56,7 @@ informal introduction to the features and their implementation. - [Custom Type Data Conversion](#custom-type-data-conversion) - [External Storage](#external-storage) - [Driver Selection](#driver-selection) + - [Built-in Drivers](#built-in-drivers) - [Custom Drivers](#custom-drivers) - [Workers](#workers) - [Workflows](#workflows) @@ -467,25 +468,36 @@ External storage allows large payloads to be offloaded to an external storage se External storage is configured via the `external_storage` parameter on `DataConverter`. It should be configured on the `Client` both for clients of your workflow as well as on the worker -- anywhere large payloads may be uploaded or downloaded. -A `StorageDriver` handles uploading and downloading payloads. Temporal provides built-in drivers for common storage solutions, or you may customize one. Here's an example using our provided `InMemoryTestDriver`. +A `StorageDriver` handles uploading and downloading payloads. Temporal provides [built-in drivers](#built-in-drivers) for common storage solutions, or you may implement a [custom driver](#custom-drivers). 
Here's an example using the built-in `S3StorageDriver` with the SDK's `aioboto3` client: ```python +import aioboto3 import dataclasses -from temporalio.client import Client +from temporalio.client import Client, ClientConfig +from temporalio.contrib.aws.s3driver import S3StorageDriver +from temporalio.contrib.aws.s3driver.aioboto3 import new_aioboto3_client from temporalio.converter import DataConverter from temporalio.converter import ExternalStorage -driver = InMemoryTestDriver() +client_config = ClientConfig.load_client_connect_config() -client = await Client.connect( - "localhost:7233", - data_converter=dataclasses.replace( - DataConverter.default, - external_storage=ExternalStorage(drivers=[driver]), - ), -) +session = aioboto3.Session() +async with session.client("s3") as s3_client: + driver = S3StorageDriver( + client=new_aioboto3_client(s3_client), + bucket="my-bucket", + ) + client = await Client.connect( + **client_config, + data_converter=dataclasses.replace( + DataConverter.default, + external_storage=ExternalStorage(drivers=[driver]), + ), + ) ``` +See the [S3 driver README](temporalio/contrib/aws/s3driver/) for further details. + Some things to note about external storage: * Only payloads that meet or exceed `ExternalStorage.payload_size_threshold` (default 256 KiB) are offloaded. Smaller payloads are stored inline as normal. @@ -540,6 +552,10 @@ Some things to note about driver selection: * Returning `None` from a selector leaves the payload stored inline in workflow history rather than offloading it. * The driver instance returned by the selector must be one of the instances registered in `ExternalStorage.drivers`. If it is not, an error is raised. +###### Built-in Drivers + +- **[S3 Storage Driver](temporalio/contrib/aws/s3driver/)**: ⚠️ **Experimental** ⚠️ Amazon S3 driver. Ships with an aioboto3 client, or bring your own by subclassing `S3StorageDriverClient`. 
+ ###### Custom Drivers Implement `temporalio.converter.StorageDriver` to integrate with an external storage system: diff --git a/pyproject.toml b/pyproject.toml index 4bcd3f03e..4ee2fed92 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,10 @@ opentelemetry = ["opentelemetry-api>=1.11.1,<2", "opentelemetry-sdk>=1.11.1,<2"] pydantic = ["pydantic>=2.0.0,<3"] openai-agents = ["openai-agents>=0.3,<0.7", "mcp>=1.9.4, <2"] google-adk = ["google-adk>=1.27.0,<2"] +aioboto3 = [ + "aioboto3>=10.4.0", + "types-aioboto3[s3]>=10.4.0", +] [project.urls] Homepage = "https://github.com/temporalio/sdk-python" @@ -64,6 +68,7 @@ dev = [ "openinference-instrumentation-google-adk>=0.1.8", "googleapis-common-protos==1.70.0", "pytest-rerunfailures>=16.1", + "moto[s3,server]>=5", ] [tool.poe.tasks] diff --git a/temporalio/contrib/aws/s3driver/README.md b/temporalio/contrib/aws/s3driver/README.md new file mode 100644 index 000000000..8e6a3e365 --- /dev/null +++ b/temporalio/contrib/aws/s3driver/README.md @@ -0,0 +1,104 @@ +# AWS Integration for Temporal Python SDK + +> ⚠️ **This package is currently at an experimental release stage.** ⚠️ + +This package provides AWS integrations for the Temporal Python SDK, including an Amazon S3 driver for [external storage](../../../README.md#external-storage). + +## S3 Driver + +`S3StorageDriver` stores and retrieves Temporal payloads in Amazon S3. It accepts any `S3StorageDriverClient` implementation and a `bucket` — either a static name or a callable for dynamic per-payload selection. + +### Using the built-in aioboto3 client + +The SDK ships with an [`aioboto3`](https://github.com/terrycain/aioboto3)-based client. 
Install the extra to pull in its dependencies: + + python -m pip install "temporalio[aioboto3]" + +```python +import aioboto3 +import dataclasses +from temporalio.client import Client +from temporalio.contrib.aws.s3driver import S3StorageDriver +from temporalio.contrib.aws.s3driver.aioboto3 import new_aioboto3_client +from temporalio.converter import DataConverter, ExternalStorage + +session = aioboto3.Session() +# Credentials and region are resolved automatically from the standard AWS credential +# chain e.g. environment variables, ~/.aws/config, IAM instance profile, and so on. +async with session.client("s3") as s3_client: + driver = S3StorageDriver( + client=new_aioboto3_client(s3_client), + bucket="my-temporal-payloads", + ) + + client = await Client.connect( + "localhost:7233", + data_converter=dataclasses.replace( + DataConverter.default, + external_storage=ExternalStorage(drivers=[driver]), + ), + ) +``` + +### Custom S3 client implementations + +To use a different S3 library, subclass `S3StorageDriverClient` and implement `put_object`, `get_object`, and `object_exists`. The ABC has no external dependencies, so no AWS packages are required to import it. + +```python +from temporalio.contrib.aws.s3driver import S3StorageDriverClient + +class MyS3Client(S3StorageDriverClient): + async def put_object(self, *, bucket: str, key: str, data: bytes) -> None: ... + async def object_exists(self, *, bucket: str, key: str) -> bool: ... + async def get_object(self, *, bucket: str, key: str) -> bytes: ... 
+ +driver = S3StorageDriver(client=MyS3Client(), bucket="my-temporal-payloads") +``` + +### Key structure + +Payloads are stored under content-addressable keys derived from a SHA-256 hash of the serialized payload bytes, segmented by namespace and workflow/activity identifiers when serialization context is available, e.g.: + + v0/ns/my-namespace/wfi/my-workflow-id/d/sha256/<sha256-hex> + +### Notes + +* Any driver used to store payloads must also be configured on the component that retrieves them. If the client stores workflow inputs using this driver, the worker must include it in its `ExternalStorage.drivers` list to retrieve them. +* The target S3 bucket must already exist; the driver will not create it. +* Identical serialized bytes within the same namespace and workflow (or activity) share the same S3 object — the key is content-addressable within that scope. The same bytes used across different workflows or namespaces produce distinct S3 objects because the key includes the namespace and workflow/activity identifiers. +* Only payloads at or above `ExternalStorage.payload_size_threshold` (default: 256 KiB) are offloaded; smaller payloads are stored inline. Set `ExternalStorage.payload_size_threshold` to `None` to offload every payload regardless of size. +* `S3StorageDriver.max_payload_size` (default: 50 MiB) sets a hard upper limit on the serialized size of any single payload. A `ValueError` is raised at store time if a payload exceeds this limit. Increase it if your workflows produce payloads larger than 50 MiB. +* Override `S3StorageDriver.driver_name` only when registering multiple `S3StorageDriver` instances with distinct configurations under the same `ExternalStorage.drivers` list. 
+ +### Dynamic Bucket Selection + +To select the S3 bucket per payload, pass a callable as `bucket`: + +```python +from temporalio.contrib.aws.s3driver import S3StorageDriver +from temporalio.contrib.aws.s3driver.aioboto3 import new_aioboto3_client + +driver = S3StorageDriver( + client=new_aioboto3_client(s3_client), + bucket=lambda context, payload: ( + "large-payloads" if payload.ByteSize() > 10 * 1024 * 1024 else "small-payloads" + ), +) +``` + +### Required IAM permissions + +The AWS credentials used by your S3 client must have the following S3 permissions on the target bucket and its objects: + +```json +{ + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject" + ], + "Resource": "arn:aws:s3:::my-temporal-payloads/*" +} +``` + +`s3:PutObject` is required by components that store payloads (typically the Temporal client and worker sending workflow/activity inputs), and `s3:GetObject` is required by components that retrieve them (typically workers and clients reading results). Components that only retrieve payloads do not need `s3:PutObject`, and vice versa. diff --git a/temporalio/contrib/aws/s3driver/__init__.py b/temporalio/contrib/aws/s3driver/__init__.py new file mode 100644 index 000000000..cdc349e24 --- /dev/null +++ b/temporalio/contrib/aws/s3driver/__init__.py @@ -0,0 +1,13 @@ +"""Amazon S3 storage driver for Temporal external storage. + +.. warning:: + This API is experimental. +""" + +from temporalio.contrib.aws.s3driver._client import S3StorageDriverClient +from temporalio.contrib.aws.s3driver._driver import S3StorageDriver + +__all__ = [ + "S3StorageDriverClient", + "S3StorageDriver", +] diff --git a/temporalio/contrib/aws/s3driver/_client.py b/temporalio/contrib/aws/s3driver/_client.py new file mode 100644 index 000000000..16e4c6a8c --- /dev/null +++ b/temporalio/contrib/aws/s3driver/_client.py @@ -0,0 +1,32 @@ +"""S3 storage driver client abstraction for the S3 storage driver. + +.. warning:: + This API is experimental. 
+""" + +from __future__ import annotations + +from abc import ABC, abstractmethod + + +class S3StorageDriverClient(ABC): + """Abstract base class for S3 object operations. + + Implementations must support ``put_object`` and ``get_object``. Multipart + upload handling (if needed) is an internal concern of each implementation. + + .. warning:: + This API is experimental. + """ + + @abstractmethod + async def put_object(self, *, bucket: str, key: str, data: bytes) -> None: + """Upload *data* to the given S3 *bucket* and *key*.""" + + @abstractmethod + async def object_exists(self, *, bucket: str, key: str) -> bool: + """Return ``True`` if an object exists at the given *bucket* and *key*.""" + + @abstractmethod + async def get_object(self, *, bucket: str, key: str) -> bytes: + """Download and return the bytes stored at the given S3 *bucket* and *key*.""" diff --git a/temporalio/contrib/aws/s3driver/_driver.py b/temporalio/contrib/aws/s3driver/_driver.py new file mode 100644 index 000000000..481e3a9d4 --- /dev/null +++ b/temporalio/contrib/aws/s3driver/_driver.py @@ -0,0 +1,228 @@ +"""Amazon S3 storage driver for Temporal external storage. + +.. warning:: + This API is experimental. 
+""" + +from __future__ import annotations + +import asyncio +import hashlib +import urllib.parse +from collections.abc import Callable, Coroutine, Sequence +from typing import Any, TypeVar + +from temporalio.api.common.v1 import Payload +from temporalio.contrib.aws.s3driver._client import S3StorageDriverClient +from temporalio.converter import ( + ActivitySerializationContext, + StorageDriver, + StorageDriverClaim, + StorageDriverRetrieveContext, + StorageDriverStoreContext, + WorkflowSerializationContext, +) + +_T = TypeVar("_T") + + +async def _gather_with_cancellation( + coros: Sequence[Coroutine[Any, Any, _T]], +) -> list[_T]: + """Run coroutines concurrently, cancelling all remaining tasks if one fails.""" + if not coros: + return [] + tasks = [asyncio.ensure_future(c) for c in coros] + try: + return list(await asyncio.gather(*tasks)) + except BaseException: + for t in tasks: + t.cancel() + await asyncio.gather(*tasks, return_exceptions=True) + raise + + +class S3StorageDriver(StorageDriver): + """Driver for storing and retrieving Temporal payloads in Amazon S3. + + Requires an :class:`S3StorageDriverClient` and a ``bucket``. Payloads are keyed by + a SHA-256 hash of their serialized bytes, segmented by namespace and + workflow/activity identifiers derived from the serialization context. + + .. warning:: + This API is experimental. + """ + + def __init__( + self, + client: S3StorageDriverClient, + bucket: str | Callable[[StorageDriverStoreContext, Payload], str], + driver_name: str = "aws.s3driver", + max_payload_size: int = 50 * 1024 * 1024, + ): + """Constructs the S3 driver. + + Args: + client: An :class:`S3StorageDriverClient` implementation. Use + :func:`~temporalio.contrib.aws.s3driver.aioboto3.new_aioboto3_client` to + wrap an aioboto3 S3 client. + bucket: S3 bucket name, access point ARN, or a callable that + accepts ``(StorageDriverStoreContext, Payload)`` and returns + a bucket name. A callable allows dynamic per-payload bucket + selection. 
driver_name: Name of this driver instance. Defaults to + ``"aws.s3driver"``. Override when registering + multiple S3StorageDriver instances with distinct configurations + under the same :attr:`~temporalio.converter.ExternalStorage.drivers` list. + max_payload_size: Maximum serialized payload size in bytes that the + driver will accept. Defaults to 52428800 (50 MiB). Raise this + value if your workload requires larger payloads; lower it to + enforce stricter limits. + """ + if max_payload_size <= 0: + raise ValueError("max_payload_size must be greater than zero") + self._client = client + self._bucket = bucket + self._driver_name = driver_name or "aws.s3driver" + self._max_payload_size = max_payload_size + + def name(self) -> str: + """Return the driver instance name.""" + return self._driver_name + + def type(self) -> str: + """Return the driver type identifier.""" + return "aws.s3driver" + + def _get_bucket(self, context: StorageDriverStoreContext, payload: Payload) -> str: + """Resolve bucket using the configured strategy.""" + if callable(self._bucket): + return self._bucket(context, payload) + return self._bucket + + async def store( + self, + context: StorageDriverStoreContext, + payloads: Sequence[Payload], + ) -> list[StorageDriverClaim]: + """Stores payloads in S3 and returns a :class:`~temporalio.converter.StorageDriverClaim` for each one. + + Payloads are keyed by their SHA-256 hash, so identical serialized bytes + share the same S3 object. Deduplication is best-effort because the same + Python value may serialize differently across payload converter versions + (e.g. proto binary). The returned list is the same length as + ``payloads``. 
+ """ + workflow_id: str | None = None + activity_id: str | None = None + namespace: str | None = None + if isinstance(context.serialization_context, WorkflowSerializationContext): + workflow_id = context.serialization_context.workflow_id + namespace = context.serialization_context.namespace + if isinstance(context.serialization_context, ActivitySerializationContext): + # Prioritize workflow over activity so that the same payload that + # may be stored across workflow and activity boundaries are deduplicated. + if context.serialization_context.workflow_id: + workflow_id = context.serialization_context.workflow_id + elif context.serialization_context.activity_id: + activity_id = context.serialization_context.activity_id + namespace = context.serialization_context.namespace + + # URL encode values to avoid characters that break the key format + # e.g. spaces, forward-slashes, etc. + if namespace: + namespace = urllib.parse.quote(namespace, safe="") + if workflow_id: + workflow_id = urllib.parse.quote(workflow_id, safe="") + if activity_id: + activity_id = urllib.parse.quote(activity_id, safe="") + + namespace_segments = f"/ns/{namespace}" if namespace else "" + + context_segments = "" + # Prioritize workflow over activity so that the same payload that + # may be stored across workflow and activity boundaries are deduplicated. + # Workflow and Activity IDs are case sensitive. 
+ if workflow_id: + context_segments += f"/wfi/{workflow_id}" + elif activity_id: + context_segments += f"/aci/{activity_id}" + + async def _upload(payload: Payload) -> StorageDriverClaim: + bucket = self._get_bucket(context, payload) + + payload_bytes = payload.SerializeToString() + if len(payload_bytes) > self._max_payload_size: + raise ValueError( + f"Payload size {len(payload_bytes)} bytes exceeds the configured " + f"max_payload_size of {self._max_payload_size} bytes" + ) + + hash_digest = hashlib.sha256(payload_bytes).hexdigest().lower() + + digest_segments = f"/d/sha256/{hash_digest}" + + key = f"v0{namespace_segments}{context_segments}{digest_segments}" + + try: + if not await self._client.object_exists(bucket=bucket, key=key): + await self._client.put_object( + bucket=bucket, key=key, data=payload_bytes + ) + except Exception as e: + raise RuntimeError( + f"S3StorageDriver store failed [bucket={bucket}, key={key}]" + ) from e + + return StorageDriverClaim( + claim_data={ + "bucket": bucket, + "key": key, + "hash_algorithm": "sha256", + "hash_value": hash_digest, + }, + ) + + return await _gather_with_cancellation([_upload(p) for p in payloads]) + + async def retrieve( + self, + context: StorageDriverRetrieveContext, # noqa: ARG002 + claims: Sequence[StorageDriverClaim], + ) -> list[Payload]: + """Retrieves payloads from S3 for the given :class:`~temporalio.extstore.DriverClaim` list.""" + + async def _download(claim: StorageDriverClaim) -> Payload: + bucket = claim.claim_data["bucket"] + key = claim.claim_data["key"] + + try: + payload_bytes = await self._client.get_object(bucket=bucket, key=key) + except Exception as e: + raise RuntimeError( + f"S3StorageDriver retrieve failed [bucket={bucket}, key={key}]" + ) from e + + expected_hash = claim.claim_data.get("hash_value") + hash_algorithm = claim.claim_data.get("hash_algorithm") + if expected_hash and hash_algorithm: + if hash_algorithm != "sha256": + raise ValueError( + f"S3StorageDriver unsupported hash 
algorithm " + f"[bucket={bucket}, key={key}]: " + f"expected sha256, got {hash_algorithm}" + ) + actual_hash = hashlib.sha256(payload_bytes).hexdigest().lower() + if actual_hash != expected_hash: + raise ValueError( + f"S3StorageDriver integrity check failed " + f"[bucket={bucket}, key={key}]: " + f"expected {hash_algorithm}:{expected_hash}, " + f"got {hash_algorithm}:{actual_hash}" + ) + + payload = Payload() + payload.ParseFromString(payload_bytes) + return payload + + return await _gather_with_cancellation([_download(c) for c in claims]) diff --git a/temporalio/contrib/aws/s3driver/aioboto3.py b/temporalio/contrib/aws/s3driver/aioboto3.py new file mode 100644 index 000000000..b3da8b7c6 --- /dev/null +++ b/temporalio/contrib/aws/s3driver/aioboto3.py @@ -0,0 +1,71 @@ +"""Aioboto3 adapter for the S3 storage driver client. + +.. warning:: + This API is experimental. +""" + +from __future__ import annotations + +import io + +from botocore.exceptions import ClientError +from types_aiobotocore_s3.client import S3Client + +from temporalio.contrib.aws.s3driver._client import S3StorageDriverClient + + +class _Aioboto3StorageDriverClient(S3StorageDriverClient): + """Adapter that wraps an aioboto3 S3 client as an :class:`S3StorageDriverClient`. + + Internally delegates to ``upload_fileobj`` for uploads (which handles + multipart automatically for objects above the multipart threshold) and + ``get_object`` for downloads. + + .. warning:: + This API is experimental. + """ + + def __init__(self, client: S3Client) -> None: + """Wrap an aioboto3 S3 client. + + Args: + client: An aioboto3 S3 client, typically obtained from + ``aioboto3.Session().client("s3")``. 
+ """ + self._client = client + + async def object_exists(self, *, bucket: str, key: str) -> bool: + """Check existence via aioboto3's ``head_object``.""" + try: + await self._client.head_object(Bucket=bucket, Key=key) + return True + except ClientError as e: + # head_object returns 404 as a ClientError when the key doesn't exist. + if e.response.get("Error", {}).get("Code") == "404": + return False + raise + + async def put_object(self, *, bucket: str, key: str, data: bytes) -> None: + """Upload *data* via aioboto3's ``upload_fileobj``.""" + # upload_fileobj is an aioboto3-specific method not in the + # types_aiobotocore_s3 stubs; it handles multipart automatically. + await self._client.upload_fileobj(io.BytesIO(data), bucket, key) # type: ignore[arg-type] + + async def get_object(self, *, bucket: str, key: str) -> bytes: + """Download bytes via aioboto3's ``get_object``.""" + response = await self._client.get_object(Bucket=bucket, Key=key) + # StreamingBody.read() is untyped in aiobotocore, returns bytes at runtime. + return await response["Body"].read() # type: ignore[no-any-return] + + +def new_aioboto3_client(client: S3Client) -> S3StorageDriverClient: + """Create an :class:`S3StorageDriverClient` from an aioboto3 S3 client. + + Args: + client: An aioboto3 S3 client, typically obtained from + ``aioboto3.Session().client("s3")``. + + .. warning:: + This API is experimental. 
+ """ + return _Aioboto3StorageDriverClient(client) diff --git a/tests/contrib/aws/__init__.py b/tests/contrib/aws/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/contrib/aws/s3driver/__init__.py b/tests/contrib/aws/s3driver/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/contrib/aws/s3driver/conftest.py b/tests/contrib/aws/s3driver/conftest.py new file mode 100644 index 000000000..71a0a8749 --- /dev/null +++ b/tests/contrib/aws/s3driver/conftest.py @@ -0,0 +1,65 @@ +"""Shared fixtures for S3 storage driver tests.""" + +from __future__ import annotations + +import socket +import urllib.request +from collections.abc import AsyncIterator, Iterator + +import aioboto3 +import pytest +from types_aiobotocore_s3.client import S3Client + +from temporalio.contrib.aws.s3driver import S3StorageDriverClient +from temporalio.contrib.aws.s3driver.aioboto3 import new_aioboto3_client + +BUCKET = "test-bucket" +REGION = "us-east-1" + + +def _find_free_port() -> int: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(("127.0.0.1", 0)) + return s.getsockname()[1] + + +@pytest.fixture(scope="session") +def moto_server_url() -> Iterator[str]: + """Start a moto S3 server for the test session and yield its base URL.""" + port = _find_free_port() + from moto.server import ThreadedMotoServer + + server = ThreadedMotoServer(port=port) + server.start() + yield f"http://127.0.0.1:{port}" + server.stop() + + +@pytest.fixture +async def aioboto3_client(moto_server_url: str) -> AsyncIterator[S3Client]: + """Yield an aioboto3 S3 client pointed at the moto server. + + Resets all moto state before each test to guarantee isolation, then + pre-creates the standard test bucket. 
+ """ + urllib.request.urlopen( + urllib.request.Request( + f"{moto_server_url}/moto-api/reset", method="POST", data=b"" + ) + ) + session = aioboto3.Session() + async with session.client( + "s3", + region_name=REGION, + endpoint_url=moto_server_url, + aws_access_key_id="testing", + aws_secret_access_key="testing", + ) as client: + await client.create_bucket(Bucket=BUCKET) + yield client + + +@pytest.fixture +def driver_client(aioboto3_client: S3Client) -> S3StorageDriverClient: + """Wrap the aioboto3 S3 client in an S3StorageDriverClient adapter.""" + return new_aioboto3_client(aioboto3_client) diff --git a/tests/contrib/aws/s3driver/test_s3driver.py b/tests/contrib/aws/s3driver/test_s3driver.py new file mode 100644 index 000000000..46184c8b7 --- /dev/null +++ b/tests/contrib/aws/s3driver/test_s3driver.py @@ -0,0 +1,831 @@ +"""Unit tests for S3StorageDriver using moto's ThreadedMotoServer to mock AWS S3. + +moto's standard mock_aws() context manager intercepts boto3/botocore via the +requests library and does not intercept aiobotocore (which aioboto3 wraps), +because aiobotocore uses aiohttp and returns coroutines where moto's mock +returns plain bytes. ThreadedMotoServer starts a real local HTTP server; the +aioboto3 client is pointed at it via endpoint_url so all API calls are +intercepted correctly. 
+""" + +from __future__ import annotations + +import asyncio +import hashlib +from collections.abc import Callable, Coroutine +from functools import wraps +from typing import Any +from unittest.mock import MagicMock + +import pytest +from botocore.exceptions import ClientError +from types_aiobotocore_s3.client import S3Client + +from temporalio.api.common.v1 import Payload +from temporalio.contrib.aws.s3driver import ( + S3StorageDriver, + S3StorageDriverClient, +) +from temporalio.converter import ( + ActivitySerializationContext, + JSONPlainPayloadConverter, + StorageDriverClaim, + StorageDriverRetrieveContext, + StorageDriverStoreContext, + WorkflowSerializationContext, +) +from tests.contrib.aws.s3driver.conftest import BUCKET + +_CONVERTER = JSONPlainPayloadConverter() + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def make_payload(value: str = "hello") -> Payload: + p = _CONVERTER.to_payload(value) + assert p is not None + return p + + +def make_store_context( + serialization_context: WorkflowSerializationContext + | ActivitySerializationContext + | None = None, +) -> StorageDriverStoreContext: + return StorageDriverStoreContext(serialization_context=serialization_context) + + +def make_workflow_context( + namespace: str = "my-namespace", + workflow_id: str = "my-workflow", +) -> WorkflowSerializationContext: + return WorkflowSerializationContext(namespace=namespace, workflow_id=workflow_id) + + +def make_activity_context( + namespace: str = "my-namespace", + activity_id: str | None = "my-activity", + workflow_id: str | None = None, + activity_task_queue: str | None = None, +) -> ActivitySerializationContext: + return ActivitySerializationContext( + namespace=namespace, + activity_id=activity_id, + activity_type=None, + activity_task_queue=activity_task_queue, + workflow_id=workflow_id, + workflow_type=None, + is_local=False, + ) 
+ + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +class CountingDriverClient(S3StorageDriverClient): + """S3StorageDriverClient wrapper that counts calls to each method.""" + + def __init__(self, delegate: S3StorageDriverClient) -> None: + self._delegate = delegate + self.put_object_count = 0 + self.get_object_count = 0 + self.object_exists_count = 0 + + async def put_object(self, *, bucket: str, key: str, data: bytes) -> None: + """Delegate to wrapped client and increment put_object counter.""" + self.put_object_count += 1 + await self._delegate.put_object(bucket=bucket, key=key, data=data) + + async def object_exists(self, *, bucket: str, key: str) -> bool: + """Delegate to wrapped client and increment object_exists counter.""" + self.object_exists_count += 1 + return await self._delegate.object_exists(bucket=bucket, key=key) + + async def get_object(self, *, bucket: str, key: str) -> bytes: + """Delegate to wrapped client and increment get_object counter.""" + self.get_object_count += 1 + return await self._delegate.get_object(bucket=bucket, key=key) + + +class FailOnceDriverClient(S3StorageDriverClient): + """S3StorageDriverClient wrapper that fails the first call to a specified + method and blocks subsequent calls until cancelled. + + Used to verify that the driver cancels in-flight tasks when one fails. 
+ """ + + def __init__( + self, + delegate: S3StorageDriverClient, + fail_on: str, + ) -> None: + self._delegate = delegate + self._fail_on = fail_on + self._call_count = 0 + self.cancelled: list[bool] = [] + + async def _maybe_fail(self) -> None: + self._call_count += 1 + if self._call_count == 1: + raise ConnectionError("S3 connection lost") + try: + await asyncio.sleep(60) + except asyncio.CancelledError: + self.cancelled.append(True) + raise + + async def put_object(self, *, bucket: str, key: str, data: bytes) -> None: + """Delegate or fail depending on configuration.""" + if self._fail_on == "put_object": + await self._maybe_fail() + await self._delegate.put_object(bucket=bucket, key=key, data=data) + + async def object_exists(self, *, bucket: str, key: str) -> bool: + """Delegate or fail depending on configuration.""" + if self._fail_on == "object_exists": + await self._maybe_fail() + return await self._delegate.object_exists(bucket=bucket, key=key) + + async def get_object(self, *, bucket: str, key: str) -> bytes: + """Delegate or fail depending on configuration.""" + if self._fail_on == "get_object": + await self._maybe_fail() + return await self._delegate.get_object(bucket=bucket, key=key) + + +@pytest.fixture +def counting_driver_client( + driver_client: S3StorageDriverClient, +) -> CountingDriverClient: + """Wrap the driver client in a counting decorator.""" + return CountingDriverClient(driver_client) + + +# --------------------------------------------------------------------------- +# TestS3StorageDriverInit — no S3 calls; MagicMock client is sufficient +# --------------------------------------------------------------------------- + + +class TestS3StorageDriverInit: + def test_default_name(self) -> None: + driver = S3StorageDriver( + client=MagicMock(spec=S3StorageDriverClient), bucket=BUCKET + ) + assert driver.name() == "aws.s3driver" + + def test_custom_name(self) -> None: + driver = S3StorageDriver( + client=MagicMock(spec=S3StorageDriverClient), + 
bucket=BUCKET, + driver_name="my-s3", + ) + assert driver.name() == "my-s3" + + def test_type(self) -> None: + driver = S3StorageDriver( + client=MagicMock(spec=S3StorageDriverClient), bucket=BUCKET + ) + assert driver.type() == "aws.s3driver" + + +# --------------------------------------------------------------------------- +# TestS3StorageDriverKeyConstruction +# --------------------------------------------------------------------------- + + +class TestS3StorageDriverKeyConstruction: + async def test_key_context_none(self, driver_client: S3StorageDriverClient) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload() + [claim] = await driver.store(make_store_context(), [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert claim.claim_data["key"] == f"v0/d/sha256/{expected_hash}" + + async def test_key_context_workflow( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload() + ctx = make_store_context( + make_workflow_context(namespace="ns1", workflow_id="wf1") + ) + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert claim.claim_data["key"] == f"v0/ns/ns1/wfi/wf1/d/sha256/{expected_hash}" + + async def test_key_context_workflow_activity( + self, driver_client: S3StorageDriverClient + ) -> None: + """workflow_id takes priority over activity_id in ActivitySerializationContext.""" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload() + ctx = make_store_context( + make_activity_context( + namespace="ns1", workflow_id="wf1", activity_id="act1" + ) + ) + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert claim.claim_data["key"] == f"v0/ns/ns1/wfi/wf1/d/sha256/{expected_hash}" + + async def 
test_key_context_standalone_activity( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload() + ctx = make_store_context( + make_activity_context(namespace="ns1", activity_id="act1", workflow_id=None) + ) + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert claim.claim_data["key"] == f"v0/ns/ns1/aci/act1/d/sha256/{expected_hash}" + + async def test_key_preserves_case( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload() + ctx = make_store_context( + make_workflow_context(namespace="MyNamespace", workflow_id="MyWorkflow") + ) + [claim] = await driver.store(ctx, [payload]) + key = claim.claim_data["key"] + assert "MyNamespace" in key + assert "MyWorkflow" in key + + async def test_key_urlencodes_workflow_id_with_slashes( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload() + ctx = make_store_context( + make_workflow_context(namespace="ns1", workflow_id="order/123/v2") + ) + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"v0/ns/ns1/wfi/order%2F123%2Fv2/d/sha256/{expected_hash}" + ) + + async def test_key_urlencodes_workflow_id_with_special_chars( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload() + ctx = make_store_context( + make_workflow_context(namespace="ns1", workflow_id="wf#1 &foo=bar") + ) + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == 
f"v0/ns/ns1/wfi/wf%231%20%26foo%3Dbar/d/sha256/{expected_hash}" + ) + + async def test_key_urlencodes_activity_id( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload() + ctx = make_store_context( + make_activity_context( + namespace="ns1", activity_id="act/1#2", workflow_id=None + ) + ) + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"v0/ns/ns1/aci/act%2F1%232/d/sha256/{expected_hash}" + ) + + async def test_key_urlencodes_namespace( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload() + ctx = make_store_context( + make_workflow_context(namespace="my/ns#1", workflow_id="wf1") + ) + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"v0/ns/my%2Fns%231/wfi/wf1/d/sha256/{expected_hash}" + ) + + async def test_key_urlencoded_roundtrip( + self, driver_client: S3StorageDriverClient + ) -> None: + """Payloads stored with special-char IDs can be retrieved correctly.""" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload("special-char-roundtrip") + ctx = make_store_context( + make_workflow_context(namespace="ns/1", workflow_id="wf/2#3") + ) + [claim] = await driver.store(ctx, [payload]) + [retrieved] = await driver.retrieve(StorageDriverRetrieveContext(), [claim]) + assert retrieved == payload + + +# --------------------------------------------------------------------------- +# TestS3StorageDriverStoreRetrieve +# --------------------------------------------------------------------------- + + +class TestS3StorageDriverStoreRetrieve: + async def test_store_returns_claim_with_bucket_key_and_hash( + self, driver_client: 
S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload() + [claim] = await driver.store(make_store_context(), [payload]) + assert claim.claim_data["bucket"] == BUCKET + assert "key" in claim.claim_data + assert claim.claim_data["hash_algorithm"] == "sha256" + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert claim.claim_data["hash_value"] == expected_hash + + async def test_roundtrip_single_payload( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload("round-trip value") + [claim] = await driver.store(make_store_context(), [payload]) + [retrieved] = await driver.retrieve(StorageDriverRetrieveContext(), [claim]) + assert retrieved == payload + + async def test_roundtrip_multiple_payloads( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payloads = [make_payload(f"value-{i}") for i in range(3)] + claims = await driver.store(make_store_context(), payloads) + retrieved = await driver.retrieve(StorageDriverRetrieveContext(), claims) + assert retrieved == payloads + + async def test_empty_payloads_returns_empty_list( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + assert await driver.store(make_store_context(), []) == [] + assert await driver.retrieve(StorageDriverRetrieveContext(), []) == [] + + async def test_roundtrip_multipart_payload( + self, aioboto3_client: S3Client, driver_client: S3StorageDriverClient + ) -> None: + """Payloads above the 8 MiB multipart threshold are uploaded via multipart + and retrieved correctly. The S3 ETag for multipart objects contains a '-' + suffix (e.g. 
'hash-2'), which we assert to confirm multipart was used.""" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + # Slightly above the default 8 MiB multipart_threshold + large_payload = make_payload("x" * (9 * 1024 * 1024)) + [claim] = await driver.store(make_store_context(), [large_payload]) + [retrieved] = await driver.retrieve(StorageDriverRetrieveContext(), [claim]) + assert retrieved == large_payload + head = await aioboto3_client.head_object( + Bucket=BUCKET, Key=claim.claim_data["key"] + ) + assert "-" in head["ETag"], "Expected a multipart ETag (hash-N format)" + + async def test_content_addressable_deduplication( + self, aioboto3_client: S3Client, driver_client: S3StorageDriverClient + ) -> None: + """Two identical payloads produce the same S3 key; only one object is stored.""" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload("same-value") + claims = await driver.store(make_store_context(), [payload, payload]) + assert claims[0].claim_data["key"] == claims[1].claim_data["key"] + response = await aioboto3_client.list_objects_v2(Bucket=BUCKET) + assert response["KeyCount"] == 1 + + async def test_skips_upload_when_key_exists( + self, counting_driver_client: CountingDriverClient + ) -> None: + """When a key already exists in S3, put_object is not called again.""" + driver = S3StorageDriver(client=counting_driver_client, bucket=BUCKET) + payload = make_payload("upload-once") + + await driver.store(make_store_context(), [payload]) + assert counting_driver_client.put_object_count == 1 + + await driver.store(make_store_context(), [payload]) + assert ( + counting_driver_client.put_object_count == 1 + ), "put_object should not be called for an existing key" + + async def test_skips_upload_preserves_data( + self, driver_client: S3StorageDriverClient + ) -> None: + """Storing the same payload twice returns correct data on retrieve.""" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = 
make_payload("preserve-me") + + [claim1] = await driver.store(make_store_context(), [payload]) + [claim2] = await driver.store(make_store_context(), [payload]) + assert claim1 == claim2 + + [retrieved] = await driver.retrieve(StorageDriverRetrieveContext(), [claim2]) + assert retrieved == payload + + async def test_retrieve_validates_hash( + self, driver_client: S3StorageDriverClient + ) -> None: + """Retrieve raises ValueError when the hash in the claim doesn't match.""" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload("check-integrity") + [claim] = await driver.store(make_store_context(), [payload]) + + tampered_claim = StorageDriverClaim( + claim_data={ + **claim.claim_data, + "hash_value": "0" * 64, + }, + ) + with pytest.raises( + ValueError, + match=r"S3StorageDriver integrity check failed \[bucket=.+, key=.+\]: expected sha256:.+, got sha256:.+", + ): + await driver.retrieve(StorageDriverRetrieveContext(), [tampered_claim]) + + async def test_retrieve_rejects_unsupported_hash_algorithm( + self, driver_client: S3StorageDriverClient + ) -> None: + """Retrieve raises ValueError when the claim specifies a non-sha256 algorithm.""" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload("unsupported-algo") + [claim] = await driver.store(make_store_context(), [payload]) + + bad_claim = StorageDriverClaim( + claim_data={ + **claim.claim_data, + "hash_algorithm": "md5", + }, + ) + with pytest.raises( + ValueError, + match=r"S3StorageDriver unsupported hash algorithm \[bucket=.+, key=.+\]: expected sha256, got md5", + ): + await driver.retrieve(StorageDriverRetrieveContext(), [bad_claim]) + + async def test_retrieve_without_hash_in_claim( + self, driver_client: S3StorageDriverClient + ) -> None: + """Claims without hash fields still retrieve successfully (backward compat).""" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payload = make_payload("no-hash-claim") + [claim] = 
await driver.store(make_store_context(), [payload]) + + legacy_claim = StorageDriverClaim( + claim_data={ + "bucket": claim.claim_data["bucket"], + "key": claim.claim_data["key"], + }, + ) + [retrieved] = await driver.retrieve( + StorageDriverRetrieveContext(), [legacy_claim] + ) + assert retrieved == payload + + +# --------------------------------------------------------------------------- +# TestS3StorageDriverBucketCallable +# --------------------------------------------------------------------------- + + +class TestS3StorageDriverBucketCallable: + async def test_callable_selector_routes_bucket( + self, aioboto3_client: S3Client, driver_client: S3StorageDriverClient + ) -> None: + other_bucket = "other-bucket" + await aioboto3_client.create_bucket(Bucket=other_bucket) + driver = S3StorageDriver( + client=driver_client, + bucket=lambda ctx, p: other_bucket, + ) + [claim] = await driver.store(make_store_context(), [make_payload()]) + assert claim.claim_data["bucket"] == other_bucket + + async def test_selector_called_per_payload( + self, driver_client: S3StorageDriverClient + ) -> None: + call_count = 0 + + def counting_selector(_ctx: StorageDriverStoreContext, _p: Payload) -> str: + nonlocal call_count + call_count += 1 + return BUCKET + + driver = S3StorageDriver(client=driver_client, bucket=counting_selector) + await driver.store( + make_store_context(), [make_payload(f"v{i}") for i in range(3)] + ) + assert call_count == 3 + + async def test_selector_routes_by_activity_task_queue( + self, aioboto3_client: S3Client, driver_client: S3StorageDriverClient + ) -> None: + """bucket callable can route payloads to different buckets by activity task queue.""" + bucket_a = "bucket-queue-a" + bucket_b = "bucket-queue-b" + await aioboto3_client.create_bucket(Bucket=bucket_a) + await aioboto3_client.create_bucket(Bucket=bucket_b) + + queue_buckets = {"queue-a": bucket_a, "queue-b": bucket_b} + + def queue_selector(ctx: StorageDriverStoreContext, p: Payload) -> str: + del p 
+ if isinstance(ctx.serialization_context, ActivitySerializationContext): + queue = ctx.serialization_context.activity_task_queue + if queue and queue in queue_buckets: + return queue_buckets[queue] + return BUCKET + + driver = S3StorageDriver(client=driver_client, bucket=queue_selector) + + ctx_a = make_store_context( + make_activity_context( + namespace="ns1", + activity_id="act1", + workflow_id="wf1", + activity_task_queue="queue-a", + ) + ) + [claim_a] = await driver.store(ctx_a, [make_payload("payload-a")]) + assert claim_a.claim_data["bucket"] == bucket_a + + ctx_b = make_store_context( + make_activity_context( + namespace="ns1", + activity_id="act2", + workflow_id="wf1", + activity_task_queue="queue-b", + ) + ) + [claim_b] = await driver.store(ctx_b, [make_payload("payload-b")]) + assert claim_b.claim_data["bucket"] == bucket_b + + async def test_selector_receives_context_and_payload( + self, driver_client: S3StorageDriverClient + ) -> None: + received: list[tuple[StorageDriverStoreContext, Payload]] = [] + + def capturing_selector(ctx: StorageDriverStoreContext, p: Payload) -> str: + received.append((ctx, p)) + return BUCKET + + payload = make_payload() + store_ctx = make_store_context(make_workflow_context()) + driver = S3StorageDriver(client=driver_client, bucket=capturing_selector) + await driver.store(store_ctx, [payload]) + + assert len(received) == 1 + assert received[0][0] is store_ctx + assert received[0][1] == payload + + +# --------------------------------------------------------------------------- +# TestS3StorageDriverErrors +# --------------------------------------------------------------------------- + + +class TestS3StorageDriverErrors: + async def test_store_nonexistent_bucket_raises( + self, driver_client: S3StorageDriverClient + ) -> None: + bucket = "does-not-exist" + payload = make_payload() + driver = S3StorageDriver(client=driver_client, bucket=bucket) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + 
expected_key = f"v0/d/sha256/{expected_hash}" + with pytest.raises(RuntimeError) as exc_info: + await driver.store(make_store_context(), [payload]) + assert ( + str(exc_info.value) + == f"S3StorageDriver store failed [bucket={bucket}, key={expected_key}]" + ) + assert isinstance(exc_info.value.__cause__, ClientError) + assert ( + exc_info.value.__cause__.response.get("Error", {}).get("Code") + == "NoSuchBucket" + ) + + async def test_retrieve_nonexistent_key_raises( + self, driver_client: S3StorageDriverClient + ) -> None: + key = "/d/sha256/nonexistent" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + claim = StorageDriverClaim(claim_data={"bucket": BUCKET, "key": key}) + with pytest.raises(RuntimeError) as exc_info: + await driver.retrieve(StorageDriverRetrieveContext(), [claim]) + assert ( + str(exc_info.value) + == f"S3StorageDriver retrieve failed [bucket={BUCKET}, key={key}]" + ) + assert isinstance(exc_info.value.__cause__, ClientError) + assert ( + exc_info.value.__cause__.response.get("Error", {}).get("Code") + == "NoSuchKey" + ) + + async def test_retrieve_nonexistent_bucket_raises( + self, driver_client: S3StorageDriverClient + ) -> None: + bucket = "does-not-exist" + key = "/d/sha256/anything" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + claim = StorageDriverClaim(claim_data={"bucket": bucket, "key": key}) + with pytest.raises(RuntimeError) as exc_info: + await driver.retrieve(StorageDriverRetrieveContext(), [claim]) + assert ( + str(exc_info.value) + == f"S3StorageDriver retrieve failed [bucket={bucket}, key={key}]" + ) + assert isinstance(exc_info.value.__cause__, ClientError) + assert ( + exc_info.value.__cause__.response.get("Error", {}).get("Code") + == "NoSuchBucket" + ) + + async def test_bucket_callable_exception_propagates( + self, driver_client: S3StorageDriverClient + ) -> None: + selector = MagicMock(side_effect=RuntimeError("selector failed")) + driver = S3StorageDriver(client=driver_client, 
bucket=selector) + with pytest.raises(RuntimeError, match="selector failed"): + await driver.store(make_store_context(), [make_payload()]) + + def test_max_payload_size_zero_raises(self) -> None: + with pytest.raises( + ValueError, match="max_payload_size must be greater than zero" + ): + S3StorageDriver( + client=MagicMock(spec=S3StorageDriverClient), + bucket=BUCKET, + max_payload_size=0, + ) + + def test_max_payload_size_negative_raises(self) -> None: + with pytest.raises( + ValueError, match="max_payload_size must be greater than zero" + ): + S3StorageDriver( + client=MagicMock(spec=S3StorageDriverClient), + bucket=BUCKET, + max_payload_size=-1, + ) + + async def test_payload_exceeds_max_size_raises( + self, driver_client: S3StorageDriverClient + ) -> None: + driver = S3StorageDriver( + client=driver_client, bucket=BUCKET, max_payload_size=10 + ) + with pytest.raises( + ValueError, + match=r"Payload size \d+ bytes exceeds the configured max_payload_size of 10 bytes", + ): + await driver.store(make_store_context(), [make_payload("exceeds-limit")]) + + async def test_payload_at_max_size_succeeds( + self, driver_client: S3StorageDriverClient + ) -> None: + payload = make_payload("x") + driver = S3StorageDriver( + client=driver_client, + bucket=BUCKET, + max_payload_size=len(payload.SerializeToString()), + ) + await driver.store(make_store_context(), [payload]) + + +# --------------------------------------------------------------------------- +# TestS3StorageDriverConcurrency +# --------------------------------------------------------------------------- + + +class _AsyncBarrier: + """Minimal asyncio.Barrier equivalent for Python <3.11.""" + + def __init__(self, parties: int) -> None: + self._parties = parties + self._count = 0 + self._event = asyncio.Event() + + async def wait(self) -> None: + self._count += 1 + if self._count >= self._parties: + self._event.set() + else: + await self._event.wait() + + +def _barrier_wrapper( + fn: Callable[..., Coroutine[Any, Any, 
Any]], barrier: _AsyncBarrier +): + """Wrap an async method to wait at a barrier before proceeding. + + All concurrent callers must reach the barrier before any of them continue. + If the calls are sequential, the barrier will never be satisfied and the + test times out. + """ + + @wraps(fn) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + await asyncio.wait_for(barrier.wait(), timeout=5) + return await fn(*args, **kwargs) + + return wrapper + + +class TestS3StorageDriverConcurrency: + async def test_store_payloads_concurrently( + self, driver_client: S3StorageDriverClient + ) -> None: + """All uploads must be in-flight concurrently. + + A barrier sized to ``num_payloads`` blocks each upload until every + upload has started. If the driver dispatches sequentially the barrier + is never satisfied and the test times out. + """ + num_payloads = 5 + barrier = _AsyncBarrier(num_payloads) + driver_client.put_object = _barrier_wrapper(driver_client.put_object, barrier) # type: ignore[method-assign] + + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payloads = [make_payload(f"concurrent-store-{i}") for i in range(num_payloads)] + + claims = await driver.store(make_store_context(), payloads) + assert len(claims) == num_payloads + + async def test_retrieve_payloads_concurrently( + self, driver_client: S3StorageDriverClient + ) -> None: + """All downloads must be in-flight concurrently.""" + num_payloads = 5 + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payloads = [ + make_payload(f"concurrent-retrieve-{i}") for i in range(num_payloads) + ] + claims = await driver.store(make_store_context(), payloads) + + barrier = _AsyncBarrier(num_payloads) + driver_client.get_object = _barrier_wrapper(driver_client.get_object, barrier) # type: ignore[method-assign] + + retrieved = await driver.retrieve(StorageDriverRetrieveContext(), claims) + assert retrieved == payloads + + async def test_store_cancels_remaining_on_failure( + self, 
driver_client: S3StorageDriverClient + ) -> None: + """When one upload fails, all other in-flight uploads are cancelled.""" + faulty_client = FailOnceDriverClient( + delegate=driver_client, + fail_on="object_exists", + ) + driver = S3StorageDriver(client=faulty_client, bucket=BUCKET) + payloads = [make_payload(f"cancel-store-{i}") for i in range(3)] + + with pytest.raises( + RuntimeError, + match=r"S3StorageDriver store failed \[bucket=.+, key=.+\]", + ) as exc_info: + await driver.store(make_store_context(), payloads) + + assert isinstance(exc_info.value.__cause__, ConnectionError) + assert str(exc_info.value.__cause__) == "S3 connection lost" + assert ( + len(faulty_client.cancelled) == 2 + ), "Expected 2 remaining tasks to be cancelled" + + async def test_retrieve_cancels_remaining_on_failure( + self, driver_client: S3StorageDriverClient + ) -> None: + """When one download fails, all other in-flight downloads are cancelled.""" + driver = S3StorageDriver(client=driver_client, bucket=BUCKET) + payloads = [make_payload(f"cancel-retrieve-{i}") for i in range(3)] + claims = await driver.store(make_store_context(), payloads) + + faulty_client = FailOnceDriverClient( + delegate=driver_client, + fail_on="get_object", + ) + driver = S3StorageDriver(client=faulty_client, bucket=BUCKET) + + with pytest.raises( + RuntimeError, + match=r"S3StorageDriver retrieve failed \[bucket=.+, key=.+\]", + ) as exc_info: + await driver.retrieve(StorageDriverRetrieveContext(), claims) + + assert isinstance(exc_info.value.__cause__, ConnectionError) + assert str(exc_info.value.__cause__) == "S3 connection lost" + assert ( + len(faulty_client.cancelled) == 2 + ), "Expected 2 remaining tasks to be cancelled" diff --git a/tests/contrib/aws/s3driver/test_s3driver_worker.py b/tests/contrib/aws/s3driver/test_s3driver_worker.py new file mode 100644 index 000000000..87ab73736 --- /dev/null +++ b/tests/contrib/aws/s3driver/test_s3driver_worker.py @@ -0,0 +1,414 @@ +"""Worker integration tests for 
S3StorageDriver key structure. + +Runs real Temporal workflows against a real worker (backed by a moto S3 +server) and asserts the S3 object key structure produced for each Temporal +primitive: workflow input/output, activity input/output, signals, queries, +updates, and child workflows. +""" + +from __future__ import annotations + +import dataclasses +import hashlib +import uuid +from collections.abc import AsyncIterator +from datetime import timedelta + +import aioboto3 +import pytest +from types_aiobotocore_s3.client import S3Client + +import temporalio.converter +from temporalio.client import Client, WorkflowFailureError +from temporalio.contrib.aws.s3driver import S3StorageDriver +from temporalio.contrib.aws.s3driver.aioboto3 import new_aioboto3_client +from temporalio.converter import ExternalStorage, JSONPlainPayloadConverter +from temporalio.exceptions import ActivityError, ApplicationError +from temporalio.testing import WorkflowEnvironment +from tests.contrib.aws.s3driver.conftest import BUCKET, REGION +from tests.contrib.aws.s3driver.workflows import ( + LARGE, + ChildWorkflow, + DocumentIngestionWorkflow, + LargeIOWorkflow, + LargeOutputNoRetryWorkflow, + ModelTrainingWorkflow, + OrderFulfillmentWorkflow, + ParentWithChildWorkflow, + PaymentProcessingWorkflow, + SignalQueryUpdateWorkflow, + download_document, + extract_text, + index_document, + large_io_activity, + large_output_activity, +) +from tests.helpers import new_worker + +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- + +_THRESHOLD = 256 # bytes — low so all test payloads are offloaded + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture +async def tmprl_client( + env: WorkflowEnvironment, aioboto3_client: S3Client +) -> 
AsyncIterator[Client]: + """Temporal client wired with ExternalStorage backed by the moto S3 server.""" + driver = S3StorageDriver(client=new_aioboto3_client(aioboto3_client), bucket=BUCKET) + yield await Client.connect( + env.client.service_client.config.target_host, + namespace=env.client.namespace, + data_converter=dataclasses.replace( + temporalio.converter.default(), + external_storage=ExternalStorage( + drivers=[driver], + payload_size_threshold=_THRESHOLD, + ), + ), + ) + + +# --------------------------------------------------------------------------- +# Helper +# --------------------------------------------------------------------------- + + +async def _list_keys(aioboto3_client: S3Client) -> list[str]: + resp = await aioboto3_client.list_objects_v2(Bucket=BUCKET) + return sorted( + key for obj in resp.get("Contents", []) if (key := obj.get("Key")) is not None + ) + + +# --------------------------------------------------------------------------- +# Tests +# --------------------------------------------------------------------------- + + +async def test_s3_driver_workflow_input_key( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, LargeIOWorkflow, activities=[large_io_activity] + ) as worker: + await tmprl_client.execute_workflow( + LargeIOWorkflow.run, + LARGE, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(aioboto3_client) + assert len(keys) == 1 + assert f"/ns/default/wfi/{workflow_id}/d/sha256/" in keys[0] + + +async def test_s3_driver_workflow_output_key( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, LargeIOWorkflow, activities=[large_io_activity] + ) as worker: + result = await tmprl_client.execute_workflow( + LargeIOWorkflow.run, + "small", # small input stays inline; workflow returns LARGE + 
id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + assert result == LARGE + keys = await _list_keys(aioboto3_client) + assert len(keys) == 1 + assert f"/ns/default/wfi/{workflow_id}/d/sha256/" in keys[0] + + +async def test_s3_driver_workflow_activity_input_key( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, LargeIOWorkflow, activities=[large_io_activity] + ) as worker: + await tmprl_client.execute_workflow( + LargeIOWorkflow.run, + LARGE, # passed through as the activity's input + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(aioboto3_client) + assert len(keys) == 1 + assert f"/ns/default/wfi/{workflow_id}/" in keys[0] + assert ( + "/aci/" not in keys[0] + ), "Activity input should use workflow_id, not activity_id" + + +async def test_s3_driver_workflow_activity_output_key( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, LargeIOWorkflow, activities=[large_io_activity] + ) as worker: + await tmprl_client.execute_workflow( + LargeIOWorkflow.run, + "small", # small input; activity returns LARGE + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(aioboto3_client) + assert len(keys) == 1 + assert f"/ns/default/wfi/{workflow_id}/d/sha256/" in keys[0] + + +async def test_s3_driver_signal_arg_key( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker(tmprl_client, SignalQueryUpdateWorkflow) as worker: + handle = await tmprl_client.start_workflow( + SignalQueryUpdateWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + await handle.signal(SignalQueryUpdateWorkflow.finish, LARGE) + await 
handle.result() + keys = await _list_keys(aioboto3_client) + assert len(keys) == 1 + assert f"/ns/default/wfi/{workflow_id}/d/sha256/" in keys[0] + + +async def test_s3_driver_query_result_key( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker(tmprl_client, SignalQueryUpdateWorkflow) as worker: + handle = await tmprl_client.start_workflow( + SignalQueryUpdateWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + result = await handle.query(SignalQueryUpdateWorkflow.get_value, LARGE) + assert result == LARGE + await handle.signal(SignalQueryUpdateWorkflow.finish, "done") + await handle.result() + keys = await _list_keys(aioboto3_client) + assert len(keys) == 1 + assert f"/ns/default/wfi/{workflow_id}/d/sha256/" in keys[0] + + +async def test_s3_driver_update_result_key( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker(tmprl_client, SignalQueryUpdateWorkflow) as worker: + handle = await tmprl_client.start_workflow( + SignalQueryUpdateWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + result = await handle.execute_update(SignalQueryUpdateWorkflow.do_update, LARGE) + assert result == LARGE + await handle.signal(SignalQueryUpdateWorkflow.finish, "done") + await handle.result() + keys = await _list_keys(aioboto3_client) + assert len(keys) == 1 + assert f"/ns/default/wfi/{workflow_id}/d/sha256/" in keys[0] + + +async def test_s3_driver_child_workflow_input_key( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, ParentWithChildWorkflow, ChildWorkflow + ) as worker: + await tmprl_client.execute_workflow( + ParentWithChildWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys 
= await _list_keys(aioboto3_client) + assert len(keys) == 1 + child_workflow_id = f"{workflow_id}-child" + assert f"/ns/default/wfi/{child_workflow_id}/d/sha256/" in keys[0] + + +async def test_s3_driver_identified_casing( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + workflow_id = f"MyWorkflow-{uuid.uuid4()}" + async with new_worker( + tmprl_client, LargeIOWorkflow, activities=[large_io_activity] + ) as worker: + await tmprl_client.execute_workflow( + LargeIOWorkflow.run, + LARGE, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(aioboto3_client) + assert len(keys) == 1 + assert "/ns/default/" in keys[0], "Namespace segment should be present" + assert ( + f"/wfi/{workflow_id}/" in keys[0] + ), "Workflow ID should preserve original case in the key" + + +async def test_s3_driver_content_dedup( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + """Document ingestion produces exactly two distinct S3 keys, even though + the payloads are repeatedly passed to different activities.""" + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, + DocumentIngestionWorkflow, + activities=[download_document, extract_text, index_document], + ) as worker: + await tmprl_client.execute_workflow( + DocumentIngestionWorkflow.run, + "doc-001", + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(aioboto3_client) + assert len(keys) == 2 + assert f"/ns/default/wfi/{workflow_id}/d/sha256/" in keys[0] + assert f"/ns/default/wfi/{workflow_id}/d/sha256/" in keys[1] + assert keys[0] != keys[1] + + +async def test_s3_driver_single_workflow_same_key_namespace( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + """A training job started with a large config, injected with large override + parameters mid-run, and polled for large metrics — all produce S3 keys + under the same workflow ID prefix, 
regardless of which primitive carried + the payload.""" + workflow_id = str(uuid.uuid4()) + async with new_worker(tmprl_client, ModelTrainingWorkflow) as worker: + handle = await tmprl_client.start_workflow( + ModelTrainingWorkflow.run, + LARGE, # large training config as workflow input + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + metrics = await handle.execute_update( + ModelTrainingWorkflow.get_metrics, "checkpoint-1" + ) + assert metrics is not None + await handle.signal(ModelTrainingWorkflow.apply_overrides, LARGE) + await handle.signal(ModelTrainingWorkflow.complete) + await handle.result() + keys = await _list_keys(aioboto3_client) + # LARGE (input + signal arg) and LARGE_2 (metrics result) deduplicate to + # two distinct keys — both anchored under the same workflow ID prefix. + assert len(keys) == 2 + assert all(f"/ns/default/wfi/{workflow_id}/" in key for key in keys) + + +async def test_s3_driver_parent_child_independent_key_namespaces( + tmprl_client: Client, aioboto3_client: S3Client +) -> None: + """An order fulfillment workflow spawns a child payment processor, passes it + a large order payload, and returns the child's large payment confirmation. 
+ Each workflow accumulates S3 keys under its own workflow ID prefix — + parent and child key namespaces are fully independent.""" + workflow_id = str(uuid.uuid4()) + payment_id = f"{workflow_id}-payment" + async with new_worker( + tmprl_client, OrderFulfillmentWorkflow, PaymentProcessingWorkflow + ) as worker: + await tmprl_client.execute_workflow( + OrderFulfillmentWorkflow.run, + LARGE, # large order details passed to parent and forwarded to child + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(aioboto3_client) + parent_prefix = f"/ns/default/wfi/{workflow_id}/d/" + child_prefix = f"/ns/default/wfi/{payment_id}/d/" + parent_keys = [k for k in keys if parent_prefix in k] + child_keys = [k for k in keys if child_prefix in k] + # The parent stores its input (LARGE) and the child's result propagated + # back (LARGE_2) under the parent's prefix → 2 keys. + # The child stores its input (LARGE) and its result (LARGE_2) under the + # child's prefix → 2 keys. 
+ assert len(parent_keys) == 2 + assert len(child_keys) == 2 + + +async def test_s3_store_failure_surfaces_in_workflow_history( + env: WorkflowEnvironment, moto_server_url: str +) -> None: + """Verifies that an S3 store failure (nonexistent bucket) produces a + RuntimeError with bucket and key context that is visible in Temporal + workflow history via the WorkflowFailureError cause chain.""" + bad_bucket = "nonexistent-bucket" + session = aioboto3.Session() + async with session.client( + "s3", + region_name=REGION, + endpoint_url=moto_server_url, + aws_access_key_id="testing", + aws_secret_access_key="testing", + ) as client: + driver = S3StorageDriver(client=new_aioboto3_client(client), bucket=bad_bucket) + bad_client = await Client.connect( + env.client.service_client.config.target_host, + namespace=env.client.namespace, + data_converter=dataclasses.replace( + temporalio.converter.default(), + external_storage=ExternalStorage( + drivers=[driver], + payload_size_threshold=_THRESHOLD, + ), + ), + ) + workflow_id = str(uuid.uuid4()) + async with new_worker( + bad_client, LargeOutputNoRetryWorkflow, activities=[large_output_activity] + ) as worker: + with pytest.raises(WorkflowFailureError) as exc_info: + await bad_client.execute_workflow( + LargeOutputNoRetryWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + + large_payload = JSONPlainPayloadConverter().to_payload(LARGE) + assert large_payload is not None + expected_hash = hashlib.sha256(large_payload.SerializeToString()).hexdigest() + expected_key = f"v0/ns/default/wfi/{workflow_id}/d/sha256/{expected_hash}" + + assert isinstance(exc_info.value, WorkflowFailureError) + activity_error = exc_info.value.__cause__ + assert isinstance(activity_error, ActivityError) + app_error = activity_error.__cause__ + assert isinstance(app_error, ApplicationError) + assert app_error.type == "RuntimeError" + assert ( + app_error.message + == f"S3StorageDriver store failed 
[bucket={bad_bucket}, key={expected_key}]" + ) diff --git a/tests/contrib/aws/s3driver/workflows.py b/tests/contrib/aws/s3driver/workflows.py new file mode 100644 index 000000000..4f4b43099 --- /dev/null +++ b/tests/contrib/aws/s3driver/workflows.py @@ -0,0 +1,223 @@ +"""Workflow and activity definitions for test_s3driver.py integration tests. + +Kept in a separate module so the workflow sandbox does not encounter +aioboto3/aiobotocore/botocore/urllib3 imports when preparing workflow classes. +""" + +from __future__ import annotations + +from datetime import timedelta + +from temporalio import activity, workflow +from temporalio.common import RetryPolicy + +LARGE = "x" * 356 # ~358 bytes as a JSON string, above the 256-byte test threshold +LARGE_2 = "y" * 356 # distinct large payload with a different SHA-256 hash + + +@activity.defn +async def large_io_activity(_data: str) -> str: + return LARGE + + +@activity.defn +async def large_output_activity() -> str: + """Returns a large payload with no retries; used to test S3 store failures.""" + return LARGE + + +@workflow.defn +class LargeOutputNoRetryWorkflow: + """Executes a single activity that returns a large payload with no retries. + + Used to verify that S3 store failures surface in workflow history without + retries masking the error. 
+ """ + + @workflow.run + async def run(self) -> str: + return await workflow.execute_activity( + large_output_activity, + schedule_to_close_timeout=timedelta(seconds=10), + retry_policy=RetryPolicy(maximum_attempts=1), + ) + + +@workflow.defn +class LargeIOWorkflow: + """Passes its input to an activity and returns a large output.""" + + @workflow.run + async def run(self, data: str) -> str: + await workflow.execute_activity( + large_io_activity, + data, + schedule_to_close_timeout=timedelta(seconds=10), + ) + return LARGE + + +@activity.defn +async def download_document(document_id: str) -> str: + """Downloads the raw document content from remote storage.""" + del document_id + return LARGE # simulates a large raw document + + +@activity.defn +async def extract_text(raw_content: str) -> str: + """Extracts and normalizes text from the raw document content.""" + del raw_content + return LARGE_2 # simulates extracted text — different content, different hash + + +@activity.defn +async def index_document(text: str) -> str: + """Indexes the extracted text into the search index. Returns the index record ID.""" + del text + return "idx-00001" # small confirmation — not offloaded to external storage + + +@workflow.defn +class DocumentIngestionWorkflow: + """Downloads a document, extracts its text, and indexes it for search. + + Illustrates how large intermediate payloads (raw document content, extracted + text) are transparently offloaded to S3 between activity boundaries without + any special handling in the workflow code. 
+ """ + + @workflow.run + async def run(self, document_id: str) -> str: + raw_content = await workflow.execute_activity( + download_document, + document_id, + schedule_to_close_timeout=timedelta(seconds=10), + ) + extracted_text = await workflow.execute_activity( + extract_text, + raw_content, + schedule_to_close_timeout=timedelta(seconds=10), + ) + return await workflow.execute_activity( + index_document, + extracted_text, + schedule_to_close_timeout=timedelta(seconds=10), + ) + + +@workflow.defn +class ChildWorkflow: + @workflow.run + async def run(self, data: str) -> str: + return f"{len(data)}" + + +@workflow.defn +class ParentWithChildWorkflow: + """Delegates work to a child workflow whose ID is {parent_id}-child.""" + + @workflow.run + async def run(self) -> str: + child_id = f"{workflow.info().workflow_id}-child" + return await workflow.execute_child_workflow( + ChildWorkflow.run, + LARGE, + id=child_id, + execution_timeout=timedelta(seconds=10), + ) + + +@workflow.defn +class PaymentProcessingWorkflow: + """Processes payment for an order and returns a large payment confirmation. + + Intended to be spawned as a child of OrderFulfillmentWorkflow. + """ + + @workflow.run + async def run(self, order_details: str) -> str: + del order_details + return LARGE_2 # payment confirmation + + +@workflow.defn +class OrderFulfillmentWorkflow: + """Coordinates order fulfillment by delegating payment to a child workflow. + + Passes the large order details to a PaymentProcessingWorkflow child whose ID + is {parent_id}-payment, then returns the child's payment confirmation. + """ + + @workflow.run + async def run(self, order_details: str) -> str: + payment_id = f"{workflow.info().workflow_id}-payment" + return await workflow.execute_child_workflow( + PaymentProcessingWorkflow.run, + order_details, + id=payment_id, + execution_timeout=timedelta(seconds=10), + ) + + +@workflow.defn +class ModelTrainingWorkflow: + """Simulates a long-running ML training job. 
+ + Accepts a large training config as input, allows the caller to inject + override parameters mid-run via signal, and exposes intermediate metrics + via an update. Demonstrates that large payloads crossing all three + primitive boundaries (input, signal arg, update result) are stored under + the same workflow ID prefix in S3. + """ + + def __init__(self) -> None: + self._overrides_received = False + self._done = False + + @workflow.run + async def run(self, training_config: str) -> str: + del training_config + await workflow.wait_condition(lambda: self._done) + return LARGE # final training summary + + @workflow.signal + async def apply_overrides(self, override_params: str) -> None: + """Injects updated hyperparameters into the running training job.""" + del override_params + self._overrides_received = True + + @workflow.signal + async def complete(self) -> None: + self._done = True + + @workflow.update + async def get_metrics(self, checkpoint_id: str) -> str: + """Returns the current training metrics snapshot.""" + del checkpoint_id + return LARGE_2 # large metrics payload + + +@workflow.defn +class SignalQueryUpdateWorkflow: + """Long-running workflow that accepts a signal, query, and update.""" + + def __init__(self) -> None: + self._done = False + + @workflow.run + async def run(self) -> str: + await workflow.wait_condition(lambda: self._done) + return LARGE + + @workflow.signal + async def finish(self, _data: str) -> None: + self._done = True + + @workflow.query + def get_value(self, _data: str) -> str: + return LARGE + + @workflow.update + async def do_update(self, _data: str) -> str: + return LARGE diff --git a/uv.lock b/uv.lock index a70f170b2..c63faefad 100644 --- a/uv.lock +++ b/uv.lock @@ -8,6 +8,51 @@ resolution-markers = [ "python_full_version < '3.11'", ] +[[package]] +name = "aioboto3" +version = "15.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiobotocore", extra = ["boto3"] }, + { name = "aiofiles" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/a2/01/92e9ab00f36e2899315f49eefcd5b4685fbb19016c7f19a9edf06da80bb0/aioboto3-15.5.0.tar.gz", hash = "sha256:ea8d8787d315594842fbfcf2c4dce3bac2ad61be275bc8584b2ce9a3402a6979", size = 255069, upload-time = "2025-10-30T13:37:16.122Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/3e/e8f5b665bca646d43b916763c901e00a07e40f7746c9128bdc912a089424/aioboto3-15.5.0-py3-none-any.whl", hash = "sha256:cc880c4d6a8481dd7e05da89f41c384dbd841454fc1998ae25ca9c39201437a6", size = 35913, upload-time = "2025-10-30T13:37:14.549Z" }, +] + +[[package]] +name = "aiobotocore" +version = "2.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "aioitertools" }, + { name = "botocore" }, + { name = "jmespath" }, + { name = "multidict" }, + { name = "python-dateutil" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/94/2e4ec48cf1abb89971cb2612d86f979a6240520f0a659b53a43116d344dc/aiobotocore-2.25.1.tar.gz", hash = "sha256:ea9be739bfd7ece8864f072ec99bb9ed5c7e78ebb2b0b15f29781fbe02daedbc", size = 120560, upload-time = "2025-10-28T22:33:21.787Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/2a/d275ec4ce5cd0096665043995a7d76f5d0524853c76a3d04656de49f8808/aiobotocore-2.25.1-py3-none-any.whl", hash = "sha256:eb6daebe3cbef5b39a0bb2a97cffbe9c7cb46b2fcc399ad141f369f3c2134b1f", size = 86039, upload-time = "2025-10-28T22:33:19.949Z" }, +] + +[package.optional-dependencies] +boto3 = [ + { name = "boto3" }, +] + +[[package]] +name = "aiofiles" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" } +wheels = [ + { url 
= "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" }, +] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -22,14 +67,14 @@ name = "aiohttp" version = "3.13.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "aiohappyeyeballs", marker = "python_full_version < '3.14'" }, - { name = "aiosignal", marker = "python_full_version < '3.14'" }, + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, { name = "async-timeout", marker = "python_full_version < '3.11'" }, - { name = "attrs", marker = "python_full_version < '3.14'" }, - { name = "frozenlist", marker = "python_full_version < '3.14'" }, - { name = "multidict", marker = "python_full_version < '3.14'" }, - { name = "propcache", marker = "python_full_version < '3.14'" }, - { name = "yarl", marker = "python_full_version < '3.14'" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, ] sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } wheels = [ @@ -137,12 +182,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" }, ] +[[package]] +name = "aioitertools" +version = "0.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/fd/3c/53c4a17a05fb9ea2313ee1777ff53f5e001aefd5cc85aa2f4c2d982e1e38/aioitertools-0.13.0.tar.gz", hash = "sha256:620bd241acc0bbb9ec819f1ab215866871b4bbd1f73836a55f799200ee86950c", size = 19322, upload-time = "2025-11-06T22:17:07.609Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/a1/510b0a7fadc6f43a6ce50152e69dbd86415240835868bb0bd9b5b88b1e06/aioitertools-0.13.0-py3-none-any.whl", hash = "sha256:0be0292b856f08dfac90e31f4739432f4cb6d7520ab9eb73e143f4f2fa5259be", size = 24182, upload-time = "2025-11-06T22:17:06.502Z" }, +] + [[package]] name = "aiosignal" version = "1.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "frozenlist", marker = "python_full_version < '3.14'" }, + { name = "frozenlist" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } @@ -192,6 +246,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] +[[package]] +name = "antlr4-python3-runtime" +version = "4.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/33/5f/2cdf6f7aca3b20d3f316e9f505292e1f256a32089bd702034c29ebde6242/antlr4_python3_runtime-4.13.2.tar.gz", hash = "sha256:909b647e1d2fc2b70180ac586df3933e38919c85f98ccc656a96cd3f25ef3916", size = 117467, upload-time = "2024-08-03T19:00:12.757Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/89/03/a851e84fcbb85214dc637b6378121ef9a0dd61b4c65264675d8a5c9b1ae7/antlr4_python3_runtime-4.13.2-py3-none-any.whl", hash = "sha256:fe3835eb8d33daece0e799090eda89719dbccee7aa39ef94eed3818cafa5a7e8", size = 144462, upload-time = "2024-08-03T19:00:11.134Z" }, +] + [[package]] name = "anyio" version = "4.11.0" @@ -246,6 +309,34 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/02/ff/1175b0b7371e46244032d43a56862d0af455823b5280a50c63d99cc50f18/automat-25.4.16-py3-none-any.whl", hash = "sha256:04e9bce696a8d5671ee698005af6e5a9fa15354140a87f4870744604dcdd3ba1", size = 42842, upload-time = "2025-04-16T20:12:14.447Z" }, ] +[[package]] +name = "aws-sam-translator" +version = "1.103.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "boto3" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d0/e3/82cc7240504b1c0d2d7ed7028b05ccceedb02932b8638c61a8372a5d875f/aws_sam_translator-1.103.0.tar.gz", hash = "sha256:8317b72ef412db581dc7846932a44dfc1729adea578d9307a3e6ece46a7882ca", size = 344881, upload-time = "2025-11-21T19:50:51.818Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/86/6414c215ff0a10b33bf89622951e7d4413106320657535d2ba0e4f634661/aws_sam_translator-1.103.0-py3-none-any.whl", hash = "sha256:d4eb4a1efa62f00b253ee5f8c0084bd4b7687186c6a12338f900ebe07ff74dad", size = 403100, upload-time = "2025-11-21T19:50:50.528Z" }, +] + +[[package]] +name = "aws-xray-sdk" +version = "2.15.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/14/25/0cbd7a440080def5e6f063720c3b190a25f8aa2938c1e34415dc18241596/aws_xray_sdk-2.15.0.tar.gz", hash = "sha256:794381b96e835314345068ae1dd3b9120bd8b4e21295066c37e8814dbb341365", size = 76315, upload-time = 
"2025-10-29T20:59:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/c3/f30a7a63e664acc7c2545ca0491b6ce8264536e0e5cad3965f1d1b91e960/aws_xray_sdk-2.15.0-py2.py3-none-any.whl", hash = "sha256:422d62ad7d52e373eebb90b642eb1bb24657afe03b22a8df4a8b2e5108e278a3", size = 103228, upload-time = "2025-10-29T21:00:24.12Z" }, +] + [[package]] name = "backports-tarfile" version = "1.2.0" @@ -276,6 +367,55 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f4/be/6985abb1011fda8a523cfe21ed9629e397d6e06fb5bae99750402b25c95b/bashlex-0.18-py2.py3-none-any.whl", hash = "sha256:91d73a23a3e51711919c1c899083890cdecffc91d8c088942725ac13e9dcfffa", size = 69539, upload-time = "2023-01-18T15:21:24.167Z" }, ] +[[package]] +name = "blinker" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, +] + +[[package]] +name = "boto3" +version = "1.40.61" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/f9/6ef8feb52c3cce5ec3967a535a6114b57ac7949fd166b0f3090c2b06e4e5/boto3-1.40.61.tar.gz", hash = "sha256:d6c56277251adf6c2bdd25249feae625abe4966831676689ff23b4694dea5b12", size = 111535, upload-time = "2025-10-28T19:26:57.247Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/61/24/3bf865b07d15fea85b63504856e137029b6acbc73762496064219cdb265d/boto3-1.40.61-py3-none-any.whl", hash = "sha256:6b9c57b2a922b5d8c17766e29ed792586a818098efe84def27c8f582b33f898c", size = 139321, upload-time = "2025-10-28T19:26:55.007Z" }, +] + +[[package]] +name = "botocore" +version = "1.40.61" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/a3/81d3a47c2dbfd76f185d3b894f2ad01a75096c006a2dd91f237dca182188/botocore-1.40.61.tar.gz", hash = "sha256:a2487ad69b090f9cccd64cf07c7021cd80ee9c0655ad974f87045b02f3ef52cd", size = 14393956, upload-time = "2025-10-28T19:26:46.108Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/c5/f6ce561004db45f0b847c2cd9b19c67c6bf348a82018a48cb718be6b58b0/botocore-1.40.61-py3-none-any.whl", hash = "sha256:17ebae412692fd4824f99cde0f08d50126dc97954008e5ba2b522eb049238aa7", size = 14055973, upload-time = "2025-10-28T19:26:42.15Z" }, +] + +[[package]] +name = "botocore-stubs" +version = "1.42.41" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-awscrt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0c/a8/a26608ff39e3a5866c6c79eda10133490205cbddd45074190becece3ff2a/botocore_stubs-1.42.41.tar.gz", hash = "sha256:dbeac2f744df6b814ce83ec3f3777b299a015cbea57a2efc41c33b8c38265825", size = 42411, upload-time = "2026-02-03T20:46:14.479Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/76/cab7af7f16c0b09347f2ebe7ffda7101132f786acb767666dce43055faab/botocore_stubs-1.42.41-py3-none-any.whl", hash = "sha256:9423110fb0e391834bd2ed44ae5f879d8cb370a444703d966d30842ce2bcb5f0", size = 66759, upload-time = "2026-02-03T20:46:13.02Z" }, +] + [[package]] name = "bracex" version = "2.6" @@ -394,6 +534,25 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, ] +[[package]] +name = "cfn-lint" +version = "1.41.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aws-sam-translator" }, + { name = "jsonpatch" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "pyyaml" }, + { name = "regex" }, + { name = "sympy" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ee/b5/436c192cdf8dbddd8e09a591384f126c5a47937c14953d87b1dacacd0543/cfn_lint-1.41.0.tar.gz", hash = "sha256:6feca1cf57f9ed2833bab68d9b1d38c8033611e571fa792e45ab4a39e2b8ab57", size = 3408534, upload-time = "2025-11-18T20:03:33.431Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/5e/81ef8f87894543210d783a495c8880cfb0b5baa0ee3bcc6d852f1b343863/cfn_lint-1.41.0-py3-none-any.whl", hash = "sha256:cd43f76f59a664b2bad580840827849fac0d56a3b80e9a41315d8ab5ff6b563a", size = 5674429, upload-time = "2025-11-18T20:03:31.083Z" }, +] + [[package]] name = "charset-normalizer" version = "3.4.4" @@ -742,6 +901,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, ] +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 
'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, +] + [[package]] name = "docstring-parser" version = "0.17.0" @@ -849,6 +1022,36 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, ] +[[package]] +name = "flask" +version = "3.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "blinker" }, + { name = "click" }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "markupsafe" }, + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/00/35d85dcce6c57fdc871f3867d465d780f302a175ea360f62533f12b27e2b/flask-3.1.3.tar.gz", hash = "sha256:0ef0e52b8a9cd932855379197dd8f94047b359ca0a78695144304cb45f87c9eb", size = 759004, upload-time = "2026-02-19T05:00:57.678Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/9c/34f6962f9b9e9c71f6e5ed806e0d0ff03c9d1b0b2340088a0cf4bce09b18/flask-3.1.3-py3-none-any.whl", hash = "sha256:f4bcbefc124291925f1a26446da31a5178f9483862233b23c0c96a20701f670c", size = 103424, upload-time = "2026-02-19T05:00:56.027Z" }, +] + +[[package]] +name = "flask-cors" +version = "6.0.2" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "flask" }, + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/74/0fc0fa68d62f21daef41017dafab19ef4b36551521260987eb3a5394c7ba/flask_cors-6.0.2.tar.gz", hash = "sha256:6e118f3698249ae33e429760db98ce032a8bf9913638d085ca0f4c5534ad2423", size = 13472, upload-time = "2025-12-12T20:31:42.861Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/af/72ad54402e599152de6d067324c46fe6a4f531c7c65baf7e96c63db55eaf/flask_cors-6.0.2-py3-none-any.whl", hash = "sha256:e57544d415dfd7da89a9564e1e3a9e515042df76e12130641ca6f3f2f03b699a", size = 13257, upload-time = "2025-12-12T20:31:41.3Z" }, +] + [[package]] name = "frozenlist" version = "1.8.0" @@ -1538,6 +1741,15 @@ grpc = [ { name = "grpcio" }, ] +[[package]] +name = "graphql-core" +version = "3.2.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/c5/36aa96205c3ecbb3d34c7c24189e4553c7ca2ebc7e1dd07432339b980272/graphql_core-3.2.8.tar.gz", hash = "sha256:015457da5d996c924ddf57a43f4e959b0b94fb695b85ed4c29446e508ed65cf3", size = 513181, upload-time = "2026-03-05T19:55:37.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/41/cb887d9afc5dabd78feefe6ccbaf83ff423c206a7a1b7aeeac05120b2125/graphql_core-3.2.8-py3-none-any.whl", hash = "sha256:cbee07bee1b3ed5e531723685369039f32ff815ef60166686e0162f540f1520c", size = 207349, upload-time = "2026-03-05T19:55:35.911Z" }, +] + [[package]] name = "graphviz" version = "0.21" @@ -1930,6 +2142,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] +[[package]] +name = "itsdangerous" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist 
= { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, +] + [[package]] name = "jaraco-classes" version = "3.4.0" @@ -1980,7 +2201,7 @@ name = "jinja2" version = "3.1.6" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "markupsafe", marker = "python_full_version < '3.14'" }, + { name = "markupsafe" }, ] sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } wheels = [ @@ -2060,9 +2281,60 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 336380, upload-time = "2025-09-15T09:20:36.867Z" }, ] +[[package]] +name = "jmespath" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/59/322338183ecda247fb5d1763a6cbe46eff7222eaeebafd9fa65d4bf5cb11/jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d", size = 27377, upload-time = "2026-01-22T16:35:26.279Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64", size = 20419, upload-time = "2026-01-22T16:35:24.919Z" }, +] + +[[package]] +name = "joserfc" +version = "1.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ce/90/b8cc8635c4ce2e5e8104bf26ef147f6e599478f6329107283cdc53aae97f/joserfc-1.6.3.tar.gz", hash = "sha256:c00c2830db969b836cba197e830e738dd9dda0955f1794e55d3c636f17f5c9a6", size = 229090, upload-time = "2026-02-25T15:33:38.167Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4f/124b3301067b752f44f292f0b9a74e837dd75ff863ee39500a082fc4c733/joserfc-1.6.3-py3-none-any.whl", hash = "sha256:6beab3635358cbc565cb94fb4c53d0557e6d10a15b933e2134939351590bda9a", size = 70465, upload-time = "2026-02-25T15:33:36.997Z" }, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonpointer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/78/18813351fe5d63acad16aec57f94ec2b70a09e53ca98145589e185423873/jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c", size = 21699, upload-time = "2023-06-26T12:07:29.144Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/07/02e16ed01e04a374e644b575638ec7987ae846d25ad97bcc9945a3ee4b0e/jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade", size = 12898, upload-time = "2023-06-16T21:01:28.466Z" }, +] + +[[package]] +name = "jsonpath-ng" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/32/58/250751940d75c8019659e15482d548a4aa3b6ce122c515102a4bfdac50e3/jsonpath_ng-1.8.0.tar.gz", hash = "sha256:54252968134b5e549ea5b872f1df1168bd7defe1a52fed5a358c194e1943ddc3", size = 74513, upload-time = "2026-02-24T14:42:06.182Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/99/33c7d78a3fb70d545fd5411ac67a651c81602cc09c9cf0df383733f068c5/jsonpath_ng-1.8.0-py3-none-any.whl", hash = "sha256:b8dde192f8af58d646fc031fac9c99fe4d00326afc4148f1f043c601a8cfe138", size = 67844, upload-time = "2026-02-28T00:53:19.637Z" }, +] + +[[package]] +name = "jsonpointer" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, +] + [[package]] name = "jsonschema" -version = "4.25.1" +version = "4.24.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -2070,9 +2342,23 @@ dependencies = [ { name = "referencing" }, { name = "rpds-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/6e/35174c1d3f30560848c82d3c233c01420e047d70925c897a4d6e932b4898/jsonschema-4.24.1.tar.gz", hash = 
"sha256:fe45a130cc7f67cd0d67640b4e7e3e2e666919462ae355eda238296eafeb4b5d", size = 356635, upload-time = "2025-07-17T14:40:01.05Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/7f/ea48ffb58f9791f9d97ccb35e42fea1ebc81c67ce36dc4b8b2eee60e8661/jsonschema-4.24.1-py3-none-any.whl", hash = "sha256:6b916866aa0b61437785f1277aa2cbd63512e8d4b47151072ef13292049b4627", size = 89060, upload-time = "2025-07-17T14:39:59.471Z" }, +] + +[[package]] +name = "jsonschema-path" +version = "0.4.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pathable" }, + { name = "pyyaml" }, + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/8a/7e6102f2b8bdc6705a9eb5294f8f6f9ccd3a8420e8e8e19671d1dd773251/jsonschema_path-0.4.5.tar.gz", hash = "sha256:c6cd7d577ae290c7defd4f4029e86fdb248ca1bd41a07557795b3c95e5144918", size = 15113, upload-time = "2026-03-03T09:56:46.87Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, + { url = "https://files.pythonhosted.org/packages/04/d5/4e96c44f6c1ea3d812cf5391d81a4f5abaa540abf8d04ecd7f66e0ed11df/jsonschema_path-0.4.5-py3-none-any.whl", hash = "sha256:7d77a2c3f3ec569a40efe5c5f942c44c1af2a6f96fe0866794c9ef5b8f87fd65", size = 19368, upload-time = "2026-03-03T09:56:45.39Z" }, ] [[package]] @@ -2105,6 +2391,51 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d3/32/da7f44bcb1105d3e88a0b74ebdca50c59121d2ddf71c9e34ba47df7f3a56/keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd", size = 39085, upload-time = "2024-12-25T15:26:44.377Z" }, ] +[[package]] +name = "lazy-object-proxy" +version = "1.12.0" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/08/a2/69df9c6ba6d316cfd81fe2381e464db3e6de5db45f8c43c6a23504abf8cb/lazy_object_proxy-1.12.0.tar.gz", hash = "sha256:1f5a462d92fd0cfb82f1fab28b51bfb209fabbe6aabf7f0d51472c0c124c0c61", size = 43681, upload-time = "2025-08-22T13:50:06.783Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/2b/d5e8915038acbd6c6a9fcb8aaf923dc184222405d3710285a1fec6e262bc/lazy_object_proxy-1.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61d5e3310a4aa5792c2b599a7a78ccf8687292c8eb09cf187cca8f09cf6a7519", size = 26658, upload-time = "2025-08-22T13:42:23.373Z" }, + { url = "https://files.pythonhosted.org/packages/da/8f/91fc00eeea46ee88b9df67f7c5388e60993341d2a406243d620b2fdfde57/lazy_object_proxy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1ca33565f698ac1aece152a10f432415d1a2aa9a42dfe23e5ba2bc255ab91f6", size = 68412, upload-time = "2025-08-22T13:42:24.727Z" }, + { url = "https://files.pythonhosted.org/packages/07/d2/b7189a0e095caedfea4d42e6b6949d2685c354263bdf18e19b21ca9b3cd6/lazy_object_proxy-1.12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01c7819a410f7c255b20799b65d36b414379a30c6f1684c7bd7eb6777338c1b", size = 67559, upload-time = "2025-08-22T13:42:25.875Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b013840cc43971582ff1ceaf784d35d3a579650eb6cc348e5e6ed7e34d28/lazy_object_proxy-1.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:029d2b355076710505c9545aef5ab3f750d89779310e26ddf2b7b23f6ea03cd8", size = 66651, upload-time = "2025-08-22T13:42:27.427Z" }, + { url = "https://files.pythonhosted.org/packages/7e/6f/b7368d301c15612fcc4cd00412b5d6ba55548bde09bdae71930e1a81f2ab/lazy_object_proxy-1.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc6e3614eca88b1c8a625fc0a47d0d745e7c3255b21dac0e30b3037c5e3deeb8", size = 66901, upload-time = "2025-08-22T13:42:28.585Z" 
}, + { url = "https://files.pythonhosted.org/packages/61/1b/c6b1865445576b2fc5fa0fbcfce1c05fee77d8979fd1aa653dd0f179aefc/lazy_object_proxy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:be5fe974e39ceb0d6c9db0663c0464669cf866b2851c73971409b9566e880eab", size = 26536, upload-time = "2025-08-22T13:42:29.636Z" }, + { url = "https://files.pythonhosted.org/packages/01/b3/4684b1e128a87821e485f5a901b179790e6b5bc02f89b7ee19c23be36ef3/lazy_object_proxy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1cf69cd1a6c7fe2dbcc3edaa017cf010f4192e53796538cc7d5e1fedbfa4bcff", size = 26656, upload-time = "2025-08-22T13:42:30.605Z" }, + { url = "https://files.pythonhosted.org/packages/3a/03/1bdc21d9a6df9ff72d70b2ff17d8609321bea4b0d3cffd2cea92fb2ef738/lazy_object_proxy-1.12.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:efff4375a8c52f55a145dc8487a2108c2140f0bec4151ab4e1843e52eb9987ad", size = 68832, upload-time = "2025-08-22T13:42:31.675Z" }, + { url = "https://files.pythonhosted.org/packages/3d/4b/5788e5e8bd01d19af71e50077ab020bc5cce67e935066cd65e1215a09ff9/lazy_object_proxy-1.12.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1192e8c2f1031a6ff453ee40213afa01ba765b3dc861302cd91dbdb2e2660b00", size = 69148, upload-time = "2025-08-22T13:42:32.876Z" }, + { url = "https://files.pythonhosted.org/packages/79/0e/090bf070f7a0de44c61659cb7f74c2fe02309a77ca8c4b43adfe0b695f66/lazy_object_proxy-1.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3605b632e82a1cbc32a1e5034278a64db555b3496e0795723ee697006b980508", size = 67800, upload-time = "2025-08-22T13:42:34.054Z" }, + { url = "https://files.pythonhosted.org/packages/cf/d2/b320325adbb2d119156f7c506a5fbfa37fcab15c26d13cf789a90a6de04e/lazy_object_proxy-1.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a61095f5d9d1a743e1e20ec6d6db6c2ca511961777257ebd9b288951b23b44fa", size = 68085, upload-time = 
"2025-08-22T13:42:35.197Z" }, + { url = "https://files.pythonhosted.org/packages/6a/48/4b718c937004bf71cd82af3713874656bcb8d0cc78600bf33bb9619adc6c/lazy_object_proxy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:997b1d6e10ecc6fb6fe0f2c959791ae59599f41da61d652f6c903d1ee58b7370", size = 26535, upload-time = "2025-08-22T13:42:36.521Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1b/b5f5bd6bda26f1e15cd3232b223892e4498e34ec70a7f4f11c401ac969f1/lazy_object_proxy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ee0d6027b760a11cc18281e702c0309dd92da458a74b4c15025d7fc490deede", size = 26746, upload-time = "2025-08-22T13:42:37.572Z" }, + { url = "https://files.pythonhosted.org/packages/55/64/314889b618075c2bfc19293ffa9153ce880ac6153aacfd0a52fcabf21a66/lazy_object_proxy-1.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4ab2c584e3cc8be0dfca422e05ad30a9abe3555ce63e9ab7a559f62f8dbc6ff9", size = 71457, upload-time = "2025-08-22T13:42:38.743Z" }, + { url = "https://files.pythonhosted.org/packages/11/53/857fc2827fc1e13fbdfc0ba2629a7d2579645a06192d5461809540b78913/lazy_object_proxy-1.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:14e348185adbd03ec17d051e169ec45686dcd840a3779c9d4c10aabe2ca6e1c0", size = 71036, upload-time = "2025-08-22T13:42:40.184Z" }, + { url = "https://files.pythonhosted.org/packages/2b/24/e581ffed864cd33c1b445b5763d617448ebb880f48675fc9de0471a95cbc/lazy_object_proxy-1.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4fcbe74fb85df8ba7825fa05eddca764138da752904b378f0ae5ab33a36c308", size = 69329, upload-time = "2025-08-22T13:42:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/78/be/15f8f5a0b0b2e668e756a152257d26370132c97f2f1943329b08f057eff0/lazy_object_proxy-1.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:563d2ec8e4d4b68ee7848c5ab4d6057a6d703cb7963b342968bb8758dda33a23", size = 70690, 
upload-time = "2025-08-22T13:42:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/5d/aa/f02be9bbfb270e13ee608c2b28b8771f20a5f64356c6d9317b20043c6129/lazy_object_proxy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:53c7fd99eb156bbb82cbc5d5188891d8fdd805ba6c1e3b92b90092da2a837073", size = 26563, upload-time = "2025-08-22T13:42:43.685Z" }, + { url = "https://files.pythonhosted.org/packages/f4/26/b74c791008841f8ad896c7f293415136c66cc27e7c7577de4ee68040c110/lazy_object_proxy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:86fd61cb2ba249b9f436d789d1356deae69ad3231dc3c0f17293ac535162672e", size = 26745, upload-time = "2025-08-22T13:42:44.982Z" }, + { url = "https://files.pythonhosted.org/packages/9b/52/641870d309e5d1fb1ea7d462a818ca727e43bfa431d8c34b173eb090348c/lazy_object_proxy-1.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:81d1852fb30fab81696f93db1b1e55a5d1ff7940838191062f5f56987d5fcc3e", size = 71537, upload-time = "2025-08-22T13:42:46.141Z" }, + { url = "https://files.pythonhosted.org/packages/47/b6/919118e99d51c5e76e8bf5a27df406884921c0acf2c7b8a3b38d847ab3e9/lazy_object_proxy-1.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be9045646d83f6c2664c1330904b245ae2371b5c57a3195e4028aedc9f999655", size = 71141, upload-time = "2025-08-22T13:42:47.375Z" }, + { url = "https://files.pythonhosted.org/packages/e5/47/1d20e626567b41de085cf4d4fb3661a56c159feaa73c825917b3b4d4f806/lazy_object_proxy-1.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:67f07ab742f1adfb3966c40f630baaa7902be4222a17941f3d85fd1dae5565ff", size = 69449, upload-time = "2025-08-22T13:42:48.49Z" }, + { url = "https://files.pythonhosted.org/packages/58/8d/25c20ff1a1a8426d9af2d0b6f29f6388005fc8cd10d6ee71f48bff86fdd0/lazy_object_proxy-1.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:75ba769017b944fcacbf6a80c18b2761a1795b03f8899acdad1f1c39db4409be", size = 
70744, upload-time = "2025-08-22T13:42:49.608Z" }, + { url = "https://files.pythonhosted.org/packages/c0/67/8ec9abe15c4f8a4bcc6e65160a2c667240d025cbb6591b879bea55625263/lazy_object_proxy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:7b22c2bbfb155706b928ac4d74c1a63ac8552a55ba7fff4445155523ea4067e1", size = 26568, upload-time = "2025-08-22T13:42:57.719Z" }, + { url = "https://files.pythonhosted.org/packages/23/12/cd2235463f3469fd6c62d41d92b7f120e8134f76e52421413a0ad16d493e/lazy_object_proxy-1.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4a79b909aa16bde8ae606f06e6bbc9d3219d2e57fb3e0076e17879072b742c65", size = 27391, upload-time = "2025-08-22T13:42:50.62Z" }, + { url = "https://files.pythonhosted.org/packages/60/9e/f1c53e39bbebad2e8609c67d0830cc275f694d0ea23d78e8f6db526c12d3/lazy_object_proxy-1.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:338ab2f132276203e404951205fe80c3fd59429b3a724e7b662b2eb539bb1be9", size = 80552, upload-time = "2025-08-22T13:42:51.731Z" }, + { url = "https://files.pythonhosted.org/packages/4c/b6/6c513693448dcb317d9d8c91d91f47addc09553613379e504435b4cc8b3e/lazy_object_proxy-1.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c40b3c9faee2e32bfce0df4ae63f4e73529766893258eca78548bac801c8f66", size = 82857, upload-time = "2025-08-22T13:42:53.225Z" }, + { url = "https://files.pythonhosted.org/packages/12/1c/d9c4aaa4c75da11eb7c22c43d7c90a53b4fca0e27784a5ab207768debea7/lazy_object_proxy-1.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:717484c309df78cedf48396e420fa57fc8a2b1f06ea889df7248fdd156e58847", size = 80833, upload-time = "2025-08-22T13:42:54.391Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ae/29117275aac7d7d78ae4f5a4787f36ff33262499d486ac0bf3e0b97889f6/lazy_object_proxy-1.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:a6b7ea5ea1ffe15059eb44bcbcb258f97bcb40e139b88152c40d07b1a1dfc9ac", size = 79516, upload-time = "2025-08-22T13:42:55.812Z" }, + { url = "https://files.pythonhosted.org/packages/19/40/b4e48b2c38c69392ae702ae7afa7b6551e0ca5d38263198b7c79de8b3bdf/lazy_object_proxy-1.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:08c465fb5cd23527512f9bd7b4c7ba6cec33e28aad36fbbe46bf7b858f9f3f7f", size = 27656, upload-time = "2025-08-22T13:42:56.793Z" }, + { url = "https://files.pythonhosted.org/packages/ef/3a/277857b51ae419a1574557c0b12e0d06bf327b758ba94cafc664cb1e2f66/lazy_object_proxy-1.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c9defba70ab943f1df98a656247966d7729da2fe9c2d5d85346464bf320820a3", size = 26582, upload-time = "2025-08-22T13:49:49.366Z" }, + { url = "https://files.pythonhosted.org/packages/1a/b6/c5e0fa43535bb9c87880e0ba037cdb1c50e01850b0831e80eb4f4762f270/lazy_object_proxy-1.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6763941dbf97eea6b90f5b06eb4da9418cc088fce0e3883f5816090f9afcde4a", size = 71059, upload-time = "2025-08-22T13:49:50.488Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/7dcad19c685963c652624702f1a968ff10220b16bfcc442257038216bf55/lazy_object_proxy-1.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fdc70d81235fc586b9e3d1aeef7d1553259b62ecaae9db2167a5d2550dcc391a", size = 71034, upload-time = "2025-08-22T13:49:54.224Z" }, + { url = "https://files.pythonhosted.org/packages/12/ac/34cbfb433a10e28c7fd830f91c5a348462ba748413cbb950c7f259e67aa7/lazy_object_proxy-1.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0a83c6f7a6b2bfc11ef3ed67f8cbe99f8ff500b05655d8e7df9aab993a6abc95", size = 69529, upload-time = "2025-08-22T13:49:55.29Z" }, + { url = "https://files.pythonhosted.org/packages/6f/6a/11ad7e349307c3ca4c0175db7a77d60ce42a41c60bcb11800aabd6a8acb8/lazy_object_proxy-1.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", 
hash = "sha256:256262384ebd2a77b023ad02fbcc9326282bcfd16484d5531154b02bc304f4c5", size = 70391, upload-time = "2025-08-22T13:49:56.35Z" }, + { url = "https://files.pythonhosted.org/packages/59/97/9b410ed8fbc6e79c1ee8b13f8777a80137d4bc189caf2c6202358e66192c/lazy_object_proxy-1.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7601ec171c7e8584f8ff3f4e440aa2eebf93e854f04639263875b8c2971f819f", size = 26988, upload-time = "2025-08-22T13:49:57.302Z" }, + { url = "https://files.pythonhosted.org/packages/41/a0/b91504515c1f9a299fc157967ffbd2f0321bce0516a3d5b89f6f4cad0355/lazy_object_proxy-1.12.0-pp39.pp310.pp311.graalpy311-none-any.whl", hash = "sha256:c3b2e0af1f7f77c4263759c4824316ce458fabe0fceadcd24ef8ca08b2d1e402", size = 15072, upload-time = "2025-08-22T13:50:05.498Z" }, +] + [[package]] name = "litellm" version = "1.78.0" @@ -2427,6 +2758,59 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, ] +[[package]] +name = "moto" +version = "5.1.22" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "boto3" }, + { name = "botocore" }, + { name = "cryptography" }, + { name = "jinja2" }, + { name = "python-dateutil" }, + { name = "requests" }, + { name = "responses" }, + { name = "werkzeug" }, + { name = "xmltodict" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/3d/1765accbf753dc1ae52f26a2e2ed2881d78c2eb9322c178e45312472e4a0/moto-5.1.22.tar.gz", hash = "sha256:e5b2c378296e4da50ce5a3c355a1743c8d6d396ea41122f5bb2a40f9b9a8cc0e", size = 8547792, upload-time = "2026-03-08T21:06:43.731Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/4f/8812a01e3e0bd6be3e13b90432fb5c696af9a720af3f00e6eba5ad748345/moto-5.1.22-py3-none-any.whl", hash = 
"sha256:d9f20ae3cf29c44f93c1f8f06c8f48d5560e5dc027816ef1d0d2059741ffcfbe", size = 6617400, upload-time = "2026-03-08T21:06:41.093Z" }, +] + +[package.optional-dependencies] +s3 = [ + { name = "py-partiql-parser" }, + { name = "pyyaml" }, +] +server = [ + { name = "antlr4-python3-runtime" }, + { name = "aws-sam-translator" }, + { name = "aws-xray-sdk" }, + { name = "cfn-lint" }, + { name = "docker" }, + { name = "flask" }, + { name = "flask-cors" }, + { name = "graphql-core" }, + { name = "joserfc" }, + { name = "jsonpath-ng" }, + { name = "openapi-spec-validator" }, + { name = "py-partiql-parser" }, + { name = "pydantic" }, + { name = "pyparsing" }, + { name = "pyyaml" }, + { name = "setuptools" }, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106, upload-time = "2023-03-07T16:47:11.061Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, +] + [[package]] name = "msgpack" version = "1.1.2" @@ -2693,6 +3077,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e8/73/d6b999782ae22f16971cc05378b3b33f6a89ede3b9619e8366aa23484bca/mypy_protobuf-3.6.0-py3-none-any.whl", hash = "sha256:56176e4d569070e7350ea620262478b49b7efceba4103d468448f1d21492fd6c", size = 16434, upload-time = "2024-04-01T20:24:40.583Z" }, ] +[[package]] +name = "networkx" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +sdist = { url = 
"https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368, upload-time = "2024-10-21T12:39:38.695Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/54/dd730b32ea14ea797530a4479b2ed46a6fb250f682a9cfb997e968bf0261/networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f", size = 1723263, upload-time = "2024-10-21T12:39:36.247Z" }, +] + +[[package]] +name = "networkx" +version = "3.6.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 2517025, upload-time = "2025-12-08T17:02:39.908Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504, upload-time = "2025-12-08T17:02:38.159Z" }, +] + [[package]] name = "nexus-rpc" version = "1.4.0" @@ -2805,6 +3215,40 @@ litellm = [ { name = "litellm", marker = "python_full_version < '3.14'" }, ] +[[package]] +name = "openapi-schema-validator" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonschema" }, + { name = "jsonschema-specifications" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "referencing" }, + { name = "rfc3339-validator" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/21/4b/67b24b2b23d96ea862be2cca3632a546f67a22461200831213e80c3c6011/openapi_schema_validator-0.8.1.tar.gz", hash = "sha256:4c57266ce8cbfa37bb4eb4d62cdb7d19356c3a468e3535743c4562863e1790da", size = 23134, upload-time = "2026-03-02T08:46:29.807Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/87/e9f29f463b230d4b47d65e17858c595153a8ca8c1775f16e406aa82d455d/openapi_schema_validator-0.8.1-py3-none-any.whl", hash = "sha256:0f5859794c5bfa433d478dc5ac5e5768d50adc56b14380c8a6fd3a8113e89c9b", size = 19211, upload-time = "2026-03-02T08:46:28.154Z" }, +] + +[[package]] +name = "openapi-spec-validator" +version = "0.8.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonschema" }, + { name = "jsonschema-path" }, + { name = "lazy-object-proxy" }, + { name = "openapi-schema-validator" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/de/0199b15f5dde3ca61df6e6b3987420bfd424db077998f0162e8ffe12e4f5/openapi_spec_validator-0.8.4.tar.gz", hash = "sha256:8bb324b9b08b9b368b1359dec14610c60a8f3a3dd63237184eb04456d4546f49", size = 1756847, upload-time = "2026-03-01T15:48:19.499Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/70/52310f9ece5f4eb02e0b31d538b51f729169517767a8d0100a25db31d67f/openapi_spec_validator-0.8.4-py3-none-any.whl", hash = "sha256:cf905117063d7c4d495c8a5a167a1f2a8006da6ffa8ba234a7ed0d0f11454d51", size = 50330, upload-time = "2026-03-01T15:48:17.668Z" }, +] + [[package]] name = "openinference-instrumentation" version = "0.1.42" @@ -3031,6 +3475,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] +[[package]] +name = "pathable" 
+version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/55/b748445cb4ea6b125626f15379be7c96d1035d4fa3e8fee362fa92298abf/pathable-0.5.0.tar.gz", hash = "sha256:d81938348a1cacb525e7c75166270644782c0fb9c8cecc16be033e71427e0ef1", size = 16655, upload-time = "2026-02-20T08:47:00.748Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/96/5a770e5c461462575474468e5af931cff9de036e7c2b4fea23c1c58d2cbe/pathable-0.5.0-py3-none-any.whl", hash = "sha256:646e3d09491a6351a0c82632a09c02cdf70a252e73196b36d8a15ba0a114f0a6", size = 16867, upload-time = "2026-02-20T08:46:59.536Z" }, +] + [[package]] name = "pathspec" version = "0.12.1" @@ -3222,6 +3675,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/05/33/2d74d588408caedd065c2497bdb5ef83ce6082db01289a1e1147f6639802/psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8", size = 249898, upload-time = "2024-01-19T20:47:59.238Z" }, ] +[[package]] +name = "py-partiql-parser" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/7a/a0f6bda783eb4df8e3dfd55973a1ac6d368a89178c300e1b5b91cd181e5e/py_partiql_parser-0.6.3.tar.gz", hash = "sha256:09cecf916ce6e3da2c050f0cb6106166de42c33d34a078ec2eb19377ea70389a", size = 17456, upload-time = "2025-10-18T13:56:13.441Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/33/a7cbfccc39056a5cf8126b7aab4c8bafbedd4f0ca68ae40ecb627a2d2cd3/py_partiql_parser-0.6.3-py2.py3-none-any.whl", hash = "sha256:deb0769c3346179d2f590dcbde556f708cdb929059fb654bad75f4cf6e07f582", size = 23752, upload-time = "2025-10-18T13:56:12.256Z" }, +] + [[package]] name = "pyarrow" version = "23.0.1" @@ -3311,7 +3773,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.12.5" +version = "2.12.4" source = { registry = "https://pypi.org/simple" } dependencies 
= [ { name = "annotated-types" }, @@ -3319,9 +3781,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/ad/a17bc283d7d81837c061c49e3eaa27a45991759a1b7eae1031921c6bd924/pydantic-2.12.4.tar.gz", hash = "sha256:0f8cb9555000a4b5b617f66bfd2566264c4984b27589d3b845685983e8ea85ac", size = 821038, upload-time = "2025-11-05T10:50:08.59Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, + { url = "https://files.pythonhosted.org/packages/82/2f/e68750da9b04856e2a7ec56fc6f034a5a79775e9b9a81882252789873798/pydantic-2.12.4-py3-none-any.whl", hash = "sha256:92d3d202a745d46f9be6df459ac5a064fdaa3c1c4cd8adcfa332ccf3c05f871e", size = 463400, upload-time = "2025-11-05T10:50:06.732Z" }, ] [[package]] @@ -3915,6 +4377,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" }, ] +[[package]] +name = "responses" +version = "0.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/9f/b4/b7e040379838cc71bf5aabdb26998dfbe5ee73904c92c1c161faf5de8866/responses-0.26.0.tar.gz", hash = "sha256:c7f6923e6343ef3682816ba421c006626777893cb0d5e1434f674b649bac9eb4", size = 81303, upload-time = "2026-02-19T14:38:05.574Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/04/7f73d05b556da048923e31a0cc878f03be7c5425ed1f268082255c75d872/responses-0.26.0-py3-none-any.whl", hash = "sha256:03ec4409088cd5c66b71ecbbbd27fe2c58ddfad801c66203457b3e6a04868c37", size = 35099, upload-time = "2026-02-19T14:38:03.847Z" }, +] + +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513, upload-time = "2021-05-12T16:37:54.178Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490, upload-time = "2021-05-12T16:37:52.536Z" }, +] + [[package]] name = "rfc3986" version = "2.0.0" @@ -4109,6 +4597,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/38/23/b3763a237d2523d40a31fe2d1a301191fe392dd48d3014977d079cf8c0bd/ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4", size = 8091891, upload-time = "2024-08-08T15:43:04.162Z" }, ] +[[package]] +name = "s3transfer" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/62/74/8d69dcb7a9efe8baa2046891735e5dfe433ad558ae23d9e3c14c633d1d58/s3transfer-0.14.0.tar.gz", hash = "sha256:eff12264e7c8b4985074ccce27a3b38a485bb7f7422cc8046fee9be4983e4125", size = 151547, upload-time = "2025-09-09T19:23:31.089Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/f0/ae7ca09223a81a1d890b2557186ea015f6e0502e9b8cb8e1813f1d8cfa4e/s3transfer-0.14.0-py3-none-any.whl", hash = "sha256:ea3b790c7077558ed1f02a3072fb3cb992bbbd253392f4b6e9e8976941c7d456", size = 85712, upload-time = "2025-09-09T19:23:30.041Z" }, +] + [[package]] name = "secretstorage" version = "3.4.0" @@ -4266,6 +4766,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/51/da/545b75d420bb23b5d494b0517757b351963e974e79933f01e05c929f20a6/starlette-0.49.1-py3-none-any.whl", hash = "sha256:d92ce9f07e4a3caa3ac13a79523bd18e3bc0042bb8ff2d759a8e7dd0e1859875", size = 74175, upload-time = "2025-10-28T17:34:09.13Z" }, ] +[[package]] +name = "sympy" +version = "1.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mpmath" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, +] + [[package]] name = "temporalio" version = "1.24.0" @@ -4279,6 +4791,10 @@ dependencies = [ ] [package.optional-dependencies] +aioboto3 = [ + { name = "aioboto3" }, + { name = "types-aioboto3", extra = ["s3"] }, +] google-adk = [ { name = "google-adk" }, ] @@ -4305,6 +4821,7 @@ dev = [ { 
name = "grpcio-tools" }, { name = "httpx" }, { name = "maturin" }, + { name = "moto", extra = ["s3", "server"] }, { name = "mypy" }, { name = "mypy-protobuf" }, { name = "openai-agents" }, @@ -4328,6 +4845,7 @@ dev = [ [package.metadata] requires-dist = [ + { name = "aioboto3", marker = "extra == 'aioboto3'", specifier = ">=10.4.0" }, { name = "google-adk", marker = "extra == 'google-adk'", specifier = ">=1.27.0,<2" }, { name = "grpcio", marker = "extra == 'grpc'", specifier = ">=1.48.2,<2" }, { name = "mcp", marker = "extra == 'openai-agents'", specifier = ">=1.9.4,<2" }, @@ -4338,10 +4856,11 @@ requires-dist = [ { name = "protobuf", specifier = ">=3.20,<7.0.0" }, { name = "pydantic", marker = "extra == 'pydantic'", specifier = ">=2.0.0,<3" }, { name = "python-dateutil", marker = "python_full_version < '3.11'", specifier = ">=2.8.2,<3" }, + { name = "types-aioboto3", extras = ["s3"], marker = "extra == 'aioboto3'", specifier = ">=10.4.0" }, { name = "types-protobuf", specifier = ">=3.20" }, { name = "typing-extensions", specifier = ">=4.2.0,<5" }, ] -provides-extras = ["grpc", "opentelemetry", "pydantic", "openai-agents", "google-adk"] +provides-extras = ["grpc", "opentelemetry", "pydantic", "openai-agents", "google-adk", "aioboto3"] [package.metadata.requires-dev] dev = [ @@ -4351,6 +4870,7 @@ dev = [ { name = "grpcio-tools", specifier = ">=1.48.2,<2" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "maturin", specifier = ">=1.8.2" }, + { name = "moto", extras = ["s3", "server"], specifier = ">=5" }, { name = "mypy", specifier = "==1.18.2" }, { name = "mypy-protobuf", specifier = ">=3.3.0,<4" }, { name = "openai-agents", marker = "python_full_version >= '3.14'", specifier = ">=0.3,<0.7" }, @@ -4575,6 +5095,60 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/eb/66/ab7efd8941f0bc7b2bd555b0f0471bff77df4c88e0cc31120c82737fec77/twisted-25.5.0-py3-none-any.whl", hash = "sha256:8559f654d01a54a8c3efe66d533d43f383531ebf8d81d9f9ab4769d91ca15df7", size 
= 3204767, upload-time = "2025-06-07T09:52:21.428Z" }, ] +[[package]] +name = "types-aioboto3" +version = "15.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore-stubs" }, + { name = "types-aiobotocore" }, + { name = "types-s3transfer" }, + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/56/76/e162ea2ef8d414d4f36f28a6e0b6078ccef3f2f9d5f957859f303995c528/types_aioboto3-15.5.0.tar.gz", hash = "sha256:5769a1c3df7ca1abedf3656ddf0b970c9b0436d0f88cf4686040b55cd2a02925", size = 81059, upload-time = "2025-10-31T01:11:54.445Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/1d/e187fbe9771dffb5f0801e315ac23a6c383c14d1cbb90da6ca3ad1ea9b06/types_aioboto3-15.5.0-py3-none-any.whl", hash = "sha256:8aed7c9b6fe9b59e6ce74f7a6db7b8a9912a34c8f80ed639fac1fa59d6b20aa1", size = 42521, upload-time = "2025-10-31T01:11:47.832Z" }, +] + +[package.optional-dependencies] +s3 = [ + { name = "types-aiobotocore-s3" }, +] + +[[package]] +name = "types-aiobotocore" +version = "2.26.0.post2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore-stubs" }, + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3e/27/c60789f312a3630cbc82181e4d6e809bd8801b471de99f14ceb11f4c5c26/types_aiobotocore-2.26.0.post2.tar.gz", hash = "sha256:68ebe5e9de3201442e56359af182493e2e642e855a9133a5918352cbf5ac4e2d", size = 86472, upload-time = "2025-12-02T16:52:55.101Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/47/e080e365376619d4062da8989747ecf7c8404bd94b2de10904239a3104f0/types_aiobotocore-2.26.0.post2-py3-none-any.whl", hash = "sha256:0e19caffd6ce6b1c3e7ba5b085d1d03357672e1aa65e5bcdfd9efb026a1041f7", size = 54207, upload-time = "2025-12-02T16:52:48.246Z" }, +] + +[[package]] +name = "types-aiobotocore-s3" +version = "2.25.2" +source 
= { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a7/c6/9bb91a44eed1114690edb15d8251f32392e355dfa0a5b8e1c190b4cf89a4/types_aiobotocore_s3-2.25.2.tar.gz", hash = "sha256:678aa425491af19bd6d011d59ecdbbb7ae7e95800efddcf4fd559ab72c94e194", size = 75955, upload-time = "2025-11-12T01:52:06.536Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/0a/d0d9faefd7caa8536eb97647c38c711e73ab83341a65119d08c2cb20957d/types_aiobotocore_s3-2.25.2-py3-none-any.whl", hash = "sha256:151301e84bb2f1cbf30f0d1ef791bb75c141cfbfe47b93fd317b7f1ba3eb35e4", size = 83626, upload-time = "2025-11-12T01:52:04.763Z" }, +] + +[[package]] +name = "types-awscrt" +version = "0.31.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/26/0aa563e229c269c528a3b8c709fc671ac2a5c564732fab0852ac6ee006cf/types_awscrt-0.31.3.tar.gz", hash = "sha256:09d3eaf00231e0f47e101bd9867e430873bc57040050e2a3bd8305cb4fc30865", size = 18178, upload-time = "2026-03-08T02:31:14.569Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/e5/47a573bbbd0a790f8f9fe452f7188ea72b212d21c9be57d5fc0cbc442075/types_awscrt-0.31.3-py3-none-any.whl", hash = "sha256:e5ce65a00a2ab4f35eacc1e3d700d792338d56e4823ee7b4dbe017f94cfc4458", size = 43340, upload-time = "2026-03-08T02:31:13.38Z" }, +] + [[package]] name = "types-protobuf" version = "6.32.1.20250918" @@ -4596,6 +5170,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658, upload-time = "2025-09-13T02:40:01.115Z" }, ] +[[package]] +name = "types-s3transfer" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist 
= { url = "https://files.pythonhosted.org/packages/fe/64/42689150509eb3e6e82b33ee3d89045de1592488842ddf23c56957786d05/types_s3transfer-0.16.0.tar.gz", hash = "sha256:b4636472024c5e2b62278c5b759661efeb52a81851cde5f092f24100b1ecb443", size = 13557, upload-time = "2025-12-08T08:13:09.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/27/e88220fe6274eccd3bdf95d9382918716d312f6f6cef6a46332d1ee2feff/types_s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:1c0cd111ecf6e21437cb410f5cddb631bfb2263b77ad973e79b9c6d0cb24e0ef", size = 19247, upload-time = "2025-12-08T08:13:08.426Z" }, +] + [[package]] name = "typing-extensions" version = "4.15.0" @@ -4761,6 +5344,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, ] +[[package]] +name = "werkzeug" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/f1/ee81806690a87dab5f5653c1f146c92bc066d7f4cebc603ef88eb9e13957/werkzeug-3.1.6.tar.gz", hash = "sha256:210c6bede5a420a913956b4791a7f4d6843a43b6fcee4dfa08a65e93007d0d25", size = 864736, upload-time = "2026-02-19T15:17:18.884Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/ec/d58832f89ede95652fd01f4f24236af7d32b70cab2196dfcc2d2fd13c5c2/werkzeug-3.1.6-py3-none-any.whl", hash = "sha256:7ddf3357bb9564e407607f988f683d72038551200c704012bb9a4c523d42f131", size = 225166, upload-time = "2026-02-19T15:17:17.475Z" }, +] + [[package]] name = "wrapt" version = "1.17.3" @@ -4830,14 +5425,23 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = 
"sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, ] +[[package]] +name = "xmltodict" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/70/80f3b7c10d2630aa66414bf23d210386700aa390547278c789afa994fd7e/xmltodict-1.0.4.tar.gz", hash = "sha256:6d94c9f834dd9e44514162799d344d815a3a4faec913717a9ecbfa5be1bb8e61", size = 26124, upload-time = "2026-02-22T02:21:22.074Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/34/98a2f52245f4d47be93b580dae5f9861ef58977d73a79eb47c58f1ad1f3a/xmltodict-1.0.4-py3-none-any.whl", hash = "sha256:a4a00d300b0e1c59fc2bfccb53d7b2e88c32f200df138a0dd2229f842497026a", size = 13580, upload-time = "2026-02-22T02:21:21.039Z" }, +] + [[package]] name = "yarl" version = "1.22.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "idna", marker = "python_full_version < '3.14'" }, - { name = "multidict", marker = "python_full_version < '3.14'" }, - { name = "propcache", marker = "python_full_version < '3.14'" }, + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, ] sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } wheels = [