See each sample's directory for specific instructions.
* [hello_standalone_activity](hello_standalone_activity) - Use activities without using a workflow. * [langchain](langchain) - Orchestrate workflows for LangChain. diff --git a/external_storage_redis/README.md b/external_storage_redis/README.md new file mode 100644 index 00000000..662b5ea7 --- /dev/null +++ b/external_storage_redis/README.md @@ -0,0 +1,132 @@ +# Redis External Storage + +This sample packages a Redis-backed `StorageDriver` implementation for Temporal +external storage. + +The code lives in: + +* `external_storage_redis/_driver.py` for the `RedisStorageDriver` +* `external_storage_redis/_client.py` for the storage client abstraction +* `external_storage_redis/redis_asyncio.py` for the `redis.asyncio` adapter +* `tests/external_storage_redis/` for unit and worker integration tests + +Unlike most samples in this repository, this one is primarily reusable driver +code plus tests rather than a standalone `worker.py` / `starter.py` pair. + +## Install Dependencies + +From the repository root: + + uv sync --group external-storage-redis --group dev + +The `external-storage-redis` group installs `redis`, and the `dev` group +installs `fakeredis` for the test suite. 
+ +## Using The Driver + +```python +import dataclasses + +import redis.asyncio as redis +import temporalio.converter +from temporalio.client import Client +from temporalio.converter import ExternalStorage + +from external_storage_redis import RedisStorageDriver +from external_storage_redis.redis_asyncio import new_redis_asyncio_client + +redis_client = redis.Redis.from_url( + "redis://localhost:6379/0", + decode_responses=False, +) +try: + driver = RedisStorageDriver( + client=new_redis_asyncio_client(redis_client), + key_prefix="temporalio:payloads", + ) + + client = await Client.connect( + "localhost:7233", + data_converter=dataclasses.replace( + temporalio.converter.default(), + external_storage=ExternalStorage( + drivers=[driver], + payload_size_threshold=256 * 1024, + ), + ), + ) +finally: + await redis_client.aclose() +``` + +`decode_responses=False` is required because the driver stores serialized +Temporal `Payload` protobuf bytes as Redis values rather than text. + +## Driver Behavior + +`RedisStorageDriver` accepts these constructor options: + +* `driver_name`: defaults to `"redis"` +* `key_prefix`: defaults to `"temporalio:payloads"` +* `ttl`: optional expiration applied only when a key is first inserted +* `max_payload_size`: defaults to 50 MiB + +Stored keys are content-addressed using SHA-256 and include Temporal execution +context when it is available. A typical workflow-scoped key looks like: + + temporalio:payloads:v0:ns:default:wt:MyWorkflow:wi:my-workflow-id:ri:my-run-id:d:sha256: + +Some behavior to be aware of: + +* Any driver used to store payloads must also be configured on the component + that retrieves them. +* The Redis instance must already exist; the driver does not provision it. +* Identical serialized bytes within the same namespace and workflow/activity + scope share the same Redis key. +* Workflow, activity, namespace, and run identifiers are URL-encoded before + being placed into the key. 
+* Only payloads at or above `ExternalStorage.payload_size_threshold` are + offloaded. +* If `ttl` is set, duplicate stores do not refresh expiration. +* If a payload key is missing at retrieval time, the driver raises a + non-retryable `ApplicationError`. + +## Custom Redis Clients + +To use a Redis library other than `redis.asyncio`, implement +`RedisStorageDriverClient`: + +```python +from datetime import timedelta + +from external_storage_redis import RedisStorageDriverClient + + +class MyRedisClient(RedisStorageDriverClient): + async def get(self, *, key: str) -> bytes | None: ... + + async def set_if_absent( + self, + *, + key: str, + data: bytes, + ttl: timedelta | None = None, + ) -> bool: ... +``` + +## Tests + +Run the full Redis sample test suite with: + + uv run pytest tests/external_storage_redis + +Run only the in-memory unit tests with: + + uv run pytest tests/external_storage_redis/test_redis.py + +The worker integration tests use `WorkflowEnvironment.start_local()` and +`fakeredis`. They do not require a real Redis server, but the first run may +download a Temporal dev-server binary. + +Some Temporal dev-server builds disable standalone activity execution. When +that happens, the two standalone-activity integration tests skip automatically. 
diff --git a/external_storage_redis/__init__.py b/external_storage_redis/__init__.py new file mode 100644 index 00000000..4f0a066d --- /dev/null +++ b/external_storage_redis/__init__.py @@ -0,0 +1,9 @@ +"""Redis storage driver sample for Temporal external storage.""" + +from external_storage_redis._client import RedisStorageDriverClient +from external_storage_redis._driver import RedisStorageDriver + +__all__ = [ + "RedisStorageDriverClient", + "RedisStorageDriver", +] diff --git a/external_storage_redis/_client.py b/external_storage_redis/_client.py new file mode 100644 index 00000000..94b493d4 --- /dev/null +++ b/external_storage_redis/_client.py @@ -0,0 +1,34 @@ +"""Redis storage driver client abstraction.""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from datetime import timedelta + + +class RedisStorageDriverClient(ABC): + """Abstract base class for the Redis operations used by the driver.""" + + @abstractmethod + async def get(self, *, key: str) -> bytes | None: + """Return the raw bytes stored for *key*, or ``None`` if absent.""" + + @abstractmethod + async def set_if_absent( + self, + *, + key: str, + data: bytes, + ttl: timedelta | None = None, + ) -> bool: + """Store *data* under *key* only if the key does not already exist. + + Args: + key: Redis key to store. + data: Serialized payload bytes. + ttl: Optional expiration to apply only when the value is inserted. + + Returns: + ``True`` if the value was inserted, ``False`` if the key already + existed. 
+ """ diff --git a/external_storage_redis/_driver.py b/external_storage_redis/_driver.py new file mode 100644 index 00000000..c13785b5 --- /dev/null +++ b/external_storage_redis/_driver.py @@ -0,0 +1,228 @@ +"""Redis storage driver for Temporal external storage.""" + +from __future__ import annotations + +import asyncio +import hashlib +import urllib.parse +from collections.abc import Coroutine, Sequence +from datetime import timedelta +from typing import Any, TypeVar + +from temporalio.api.common.v1 import Payload +from temporalio.converter import ( + StorageDriver, + StorageDriverActivityInfo, + StorageDriverClaim, + StorageDriverRetrieveContext, + StorageDriverStoreContext, + StorageDriverWorkflowInfo, +) +from temporalio.exceptions import ApplicationError + +from external_storage_redis._client import RedisStorageDriverClient + +_T = TypeVar("_T") + + +async def _gather_with_cancellation( + coros: Sequence[Coroutine[Any, Any, _T]], +) -> list[_T]: + """Run coroutines concurrently, cancelling remaining tasks on failure.""" + if not coros: + return [] + tasks = [asyncio.create_task(coro) for coro in coros] + try: + return list(await asyncio.gather(*tasks)) + except BaseException: + for task in tasks: + task.cancel() + await asyncio.gather(*tasks, return_exceptions=True) + raise + + +class RedisStorageDriver(StorageDriver): + """Driver for storing and retrieving Temporal payloads in Redis. + + Payloads are stored as Redis string values keyed by a SHA-256 digest of the + serialized payload bytes. The key also includes namespace and + workflow/activity identity segments derived from the storage context so + distinct Temporal scopes remain isolated. + """ + + def __init__( + self, + client: RedisStorageDriverClient, + *, + driver_name: str = "redis", + key_prefix: str = "temporalio:payloads", + ttl: timedelta | None = None, + max_payload_size: int = 50 * 1024 * 1024, + ) -> None: + """Construct the Redis driver. 
+ + Args: + client: A :class:`RedisStorageDriverClient` implementation. Use + :func:`external_storage_redis.redis_asyncio.new_redis_asyncio_client` + to wrap a ``redis.asyncio`` client. + driver_name: Name of this driver instance. Defaults to ``"redis"``. + Override this when registering multiple RedisStorageDriver + instances with distinct configurations under the same + ``ExternalStorage.drivers`` list. + key_prefix: Prefix prepended to all Redis keys. Defaults to + ``"temporalio:payloads"``. + ttl: Optional expiration to apply when a key is first written. + Existing keys are not refreshed when the same payload is stored + again. + max_payload_size: Maximum serialized payload size in bytes that the + driver will accept. Defaults to 52428800 (50 MiB). + """ + if max_payload_size <= 0: + raise ValueError("max_payload_size must be greater than zero") + if ttl is not None and ttl <= timedelta(0): + raise ValueError("ttl must be greater than zero") + self._client = client + self._driver_name = driver_name or "redis" + self._key_prefix = key_prefix.rstrip(":") + self._ttl = ttl + self._max_payload_size = max_payload_size + + def name(self) -> str: + """Return the driver instance name.""" + return self._driver_name + + def type(self) -> str: + """Return the driver type identifier.""" + return "redis" + + def _build_key( + self, + context: StorageDriverStoreContext, + hash_digest: str, + ) -> str: + """Construct a context-aware Redis key for a payload digest.""" + + def _quote(value: str | None) -> str | None: + return urllib.parse.quote(value, safe="") if value else None + + segments = ["v0"] + target = context.target + namespace = _quote(target.namespace) if target is not None else None + if namespace: + segments.extend(["ns", namespace]) + + if isinstance(target, StorageDriverWorkflowInfo): + segments.extend( + [ + "wt", + _quote(target.type) or "null", + "wi", + _quote(target.id) or "null", + "ri", + _quote(target.run_id) or "null", + ] + ) + elif isinstance(target, 
StorageDriverActivityInfo): + segments.extend( + [ + "at", + _quote(target.type) or "null", + "ai", + _quote(target.id) or "null", + "ri", + _quote(target.run_id) or "null", + ] + ) + + segments.extend(["d", "sha256", hash_digest]) + if not self._key_prefix: + return ":".join(segments) + return f"{self._key_prefix}:{':'.join(segments)}" + + async def store( + self, + context: StorageDriverStoreContext, + payloads: Sequence[Payload], + ) -> list[StorageDriverClaim]: + """Store payloads in Redis and return a claim for each payload.""" + + async def _store_payload(payload: Payload) -> StorageDriverClaim: + payload_bytes = payload.SerializeToString() + payload_size = len(payload_bytes) + if payload_size > self._max_payload_size: + raise ValueError( + f"Payload size {payload_size} bytes exceeds the configured " + f"max_payload_size of {self._max_payload_size} bytes" + ) + + hash_digest = hashlib.sha256(payload_bytes).hexdigest().lower() + key = self._build_key(context, hash_digest) + + try: + await self._client.set_if_absent( + key=key, + data=payload_bytes, + ttl=self._ttl, + ) + except Exception as err: + raise RuntimeError( + f"RedisStorageDriver store failed [key={key}]" + ) from err + + return StorageDriverClaim( + claim_data={ + "key": key, + "hash_algorithm": "sha256", + "hash_value": hash_digest, + }, + ) + + return await _gather_with_cancellation([_store_payload(p) for p in payloads]) + + async def retrieve( + self, + context: StorageDriverRetrieveContext, # noqa: ARG002 + claims: Sequence[StorageDriverClaim], + ) -> list[Payload]: + """Retrieve payloads from Redis for the given claims.""" + + async def _retrieve_payload(claim: StorageDriverClaim) -> Payload: + key = claim.claim_data["key"] + + try: + payload_bytes = await self._client.get(key=key) + except Exception as err: + raise RuntimeError( + f"RedisStorageDriver retrieve failed [key={key}]" + ) from err + + if payload_bytes is None: + raise ApplicationError( + f"Payload not found for key '{key}'", + 
type="PayloadNotFoundError", + non_retryable=True, + ) + + expected_hash = claim.claim_data.get("hash_value") + hash_algorithm = claim.claim_data.get("hash_algorithm") + if expected_hash and hash_algorithm: + if hash_algorithm != "sha256": + raise ValueError( + f"RedisStorageDriver unsupported hash algorithm " + f"[key={key}]: expected sha256, got {hash_algorithm}" + ) + actual_hash = hashlib.sha256(payload_bytes).hexdigest().lower() + if actual_hash != expected_hash: + raise ValueError( + f"RedisStorageDriver integrity check failed " + f"[key={key}]: expected {hash_algorithm}:{expected_hash}, " + f"got {hash_algorithm}:{actual_hash}" + ) + + payload = Payload() + payload.ParseFromString(payload_bytes) + return payload + + return await _gather_with_cancellation( + [_retrieve_payload(claim) for claim in claims] + ) diff --git a/external_storage_redis/redis_asyncio.py b/external_storage_redis/redis_asyncio.py new file mode 100644 index 00000000..6f7ca312 --- /dev/null +++ b/external_storage_redis/redis_asyncio.py @@ -0,0 +1,52 @@ +"""redis.asyncio adapter for the Redis storage driver client.""" + +from __future__ import annotations + +import math +from datetime import timedelta + +from redis.asyncio.client import Redis + +from external_storage_redis._client import RedisStorageDriverClient + + +class _RedisAsyncioStorageDriverClient(RedisStorageDriverClient): + """Adapter that wraps a ``redis.asyncio.Redis`` client. + + The wrapped client must be configured for binary-safe reads and writes, + which means ``decode_responses`` must remain disabled. 
+ """ + + def __init__(self, client: Redis) -> None: + """Wrap a ``redis.asyncio.Redis`` client.""" + self._client = client + + async def get(self, *, key: str) -> bytes | None: + """Fetch raw bytes for *key* from Redis.""" + value = await self._client.get(key) + if value is None: + return None + if not isinstance(value, bytes): + raise TypeError( + "redis.asyncio client must be configured with decode_responses=False" + ) + return value + + async def set_if_absent( + self, + *, + key: str, + data: bytes, + ttl: timedelta | None = None, + ) -> bool: + """Atomically set *key* only when it is absent.""" + ttl_ms = None + if ttl is not None: + ttl_ms = max(1, math.ceil(ttl.total_seconds() * 1000)) + result = await self._client.set(key, data, px=ttl_ms, nx=True) + return bool(result) + + +def new_redis_asyncio_client(client: Redis) -> RedisStorageDriverClient: + """Create a driver client from a ``redis.asyncio.Redis`` instance.""" + return _RedisAsyncioStorageDriverClient(client) diff --git a/external_storage_redis/workflows.py b/external_storage_redis/workflows.py new file mode 100644 index 00000000..2a3de23a --- /dev/null +++ b/external_storage_redis/workflows.py @@ -0,0 +1,194 @@ +"""Workflows and activities used by the Redis external storage sample tests.""" + +from __future__ import annotations + +from datetime import timedelta + +from temporalio import activity, workflow +from temporalio.common import RetryPolicy + +LARGE = "x" * 356 # ~358 bytes as a JSON string, above the 256-byte test threshold +LARGE_2 = "y" * 356 # distinct large payload with a different SHA-256 hash + + +@activity.defn +async def large_io_activity(_data: str) -> str: + return LARGE + + +@activity.defn +async def large_output_activity() -> str: + """Return a large payload with no retries; used to test store failures.""" + return LARGE + + +@workflow.defn +class LargeOutputNoRetryWorkflow: + """Execute one activity that returns a large payload with no retries.""" + + @workflow.run + async def 
run(self) -> str: + return await workflow.execute_activity( + large_output_activity, + schedule_to_close_timeout=timedelta(seconds=10), + retry_policy=RetryPolicy(maximum_attempts=1), + ) + + +@workflow.defn +class LargeIOWorkflow: + """Pass workflow input to an activity and return a large output.""" + + @workflow.run + async def run(self, data: str) -> str: + await workflow.execute_activity( + large_io_activity, + data, + schedule_to_close_timeout=timedelta(seconds=10), + ) + return LARGE + + +@activity.defn +async def download_document(document_id: str) -> str: + """Download the raw document content from remote storage.""" + del document_id + return LARGE + + +@activity.defn +async def extract_text(raw_content: str) -> str: + """Extract and normalize text from the raw document content.""" + del raw_content + return LARGE_2 + + +@activity.defn +async def index_document(text: str) -> str: + """Index the extracted text into the search index.""" + del text + return "idx-00001" + + +@workflow.defn +class DocumentIngestionWorkflow: + """Download, extract, and index a document through large payload hops.""" + + @workflow.run + async def run(self, document_id: str) -> str: + raw_content = await workflow.execute_activity( + download_document, + document_id, + schedule_to_close_timeout=timedelta(seconds=10), + ) + extracted_text = await workflow.execute_activity( + extract_text, + raw_content, + schedule_to_close_timeout=timedelta(seconds=10), + ) + return await workflow.execute_activity( + index_document, + extracted_text, + schedule_to_close_timeout=timedelta(seconds=10), + ) + + +@workflow.defn +class ChildWorkflow: + @workflow.run + async def run(self, data: str) -> str: + return f"{len(data)}" + + +@workflow.defn +class ParentWithChildWorkflow: + """Delegate work to a child workflow whose ID is ``{parent_id}-child``.""" + + @workflow.run + async def run(self) -> str: + child_id = f"{workflow.info().workflow_id}-child" + return await workflow.execute_child_workflow( + 
ChildWorkflow.run, + LARGE, + id=child_id, + execution_timeout=timedelta(seconds=10), + ) + + +@workflow.defn +class PaymentProcessingWorkflow: + """Process payment for an order and return a large confirmation payload.""" + + @workflow.run + async def run(self, order_details: str) -> str: + del order_details + return LARGE_2 + + +@workflow.defn +class OrderFulfillmentWorkflow: + """Coordinate order fulfillment by delegating payment to a child workflow.""" + + @workflow.run + async def run(self, order_details: str) -> str: + payment_id = f"{workflow.info().workflow_id}-payment" + return await workflow.execute_child_workflow( + PaymentProcessingWorkflow.run, + order_details, + id=payment_id, + execution_timeout=timedelta(seconds=10), + ) + + +@workflow.defn +class ModelTrainingWorkflow: + """Simulate a long-running training job with large cross-boundary payloads.""" + + def __init__(self) -> None: + self._done = False + + @workflow.run + async def run(self, training_config: str) -> str: + del training_config + await workflow.wait_condition(lambda: self._done) + return LARGE + + @workflow.signal + async def apply_overrides(self, override_params: str) -> None: + """Inject updated configuration into the running training job.""" + del override_params + + @workflow.signal + async def complete(self) -> None: + self._done = True + + @workflow.update + async def get_metrics(self, checkpoint_id: str) -> str: + """Return the current training metrics snapshot.""" + del checkpoint_id + return LARGE_2 + + +@workflow.defn +class SignalQueryUpdateWorkflow: + """Long-running workflow that accepts a signal, query, and update.""" + + def __init__(self) -> None: + self._done = False + + @workflow.run + async def run(self) -> str: + await workflow.wait_condition(lambda: self._done) + return LARGE + + @workflow.signal + async def finish(self, _data: str) -> None: + self._done = True + + @workflow.query + def get_value(self, _data: str) -> str: + return LARGE + + @workflow.update + async 
def do_update(self, _data: str) -> str: + return LARGE diff --git a/pyproject.toml b/pyproject.toml index caae123c..99266054 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ dev = [ "mypy>=1.4.1,<2", "pytest>=7.1.2,<8", "pytest-asyncio>=0.18.3,<0.19", + "fakeredis>=2,<3", "frozenlist>=1.4.0,<2", "pyright>=1.1.394", "types-pyyaml>=6.0.12.20241230,<7", @@ -29,6 +30,7 @@ dev = [ bedrock = ["boto3>=1.34.92,<2"] dsl = ["pyyaml>=6.0.1,<7", "types-pyyaml>=6.0.12,<7", "dacite>=1.8.1,<2"] encryption = ["cryptography>=38.0.1,<39", "aiohttp>=3.8.1,<4"] +external-storage-redis = ["redis>=5.0.0,<8"] gevent = ["gevent>=25.4.2 ; python_version >= '3.8'"] langchain = [ "langchain>=0.1.7,<0.2 ; python_version >= '3.8.1' and python_version < '4.0'", @@ -77,6 +79,7 @@ packages = [ "custom_metric", "dsl", "encryption", + "external_storage_redis", "gevent_async", "hello", "langchain", diff --git a/tests/external_storage_redis/__init__.py b/tests/external_storage_redis/__init__.py new file mode 100644 index 00000000..0d45e2f5 --- /dev/null +++ b/tests/external_storage_redis/__init__.py @@ -0,0 +1 @@ +"""Tests for the Redis external storage sample.""" diff --git a/tests/external_storage_redis/conftest.py b/tests/external_storage_redis/conftest.py new file mode 100644 index 00000000..5684a7c3 --- /dev/null +++ b/tests/external_storage_redis/conftest.py @@ -0,0 +1,36 @@ +"""Shared fixtures for Redis external storage tests.""" + +from __future__ import annotations + +from collections.abc import AsyncIterator + +import fakeredis.aioredis +import pytest +import pytest_asyncio +from redis.asyncio.client import Redis + +from external_storage_redis import RedisStorageDriverClient +from external_storage_redis.redis_asyncio import new_redis_asyncio_client + +KEY_PREFIX = "test:payloads" + + +@pytest_asyncio.fixture +async def redis_asyncio_client() -> AsyncIterator[Redis]: + """Yield a fake Redis client with an empty database for each test.""" + client = 
fakeredis.aioredis.FakeRedis(decode_responses=False) + await client.flushdb() + try: + yield client + finally: + aclose = getattr(client, "aclose", None) + if aclose is not None: + await aclose() + else: + await client.close() + + +@pytest.fixture +def driver_client(redis_asyncio_client: Redis) -> RedisStorageDriverClient: + """Wrap the redis.asyncio client in a RedisStorageDriverClient adapter.""" + return new_redis_asyncio_client(redis_asyncio_client) diff --git a/tests/external_storage_redis/test_redis.py b/tests/external_storage_redis/test_redis.py new file mode 100644 index 00000000..30e174de --- /dev/null +++ b/tests/external_storage_redis/test_redis.py @@ -0,0 +1,721 @@ +"""Unit tests for RedisStorageDriver using fakeredis.""" + +from __future__ import annotations + +import asyncio +import hashlib +from collections.abc import Callable, Coroutine +from datetime import timedelta +from functools import wraps +from typing import Any +from unittest.mock import MagicMock + +import fakeredis.aioredis +import pytest + +from temporalio.api.common.v1 import Payload +from temporalio.converter import ( + JSONPlainPayloadConverter, + StorageDriverActivityInfo, + StorageDriverClaim, + StorageDriverRetrieveContext, + StorageDriverStoreContext, + StorageDriverWorkflowInfo, +) +from temporalio.exceptions import ApplicationError + +from external_storage_redis import RedisStorageDriver, RedisStorageDriverClient +from external_storage_redis.redis_asyncio import new_redis_asyncio_client +from tests.external_storage_redis.conftest import KEY_PREFIX + +_CONVERTER = JSONPlainPayloadConverter() + + +def make_payload(value: str = "hello") -> Payload: + payload = _CONVERTER.to_payload(value) + assert payload is not None + return payload + + +def make_store_context( + target: StorageDriverActivityInfo | StorageDriverWorkflowInfo | None = None, +) -> StorageDriverStoreContext: + return StorageDriverStoreContext(target=target) + + +def make_workflow_context( + namespace: str = 
"my-namespace", + workflow_id: str = "my-workflow", + workflow_type: str | None = None, + run_id: str | None = None, +) -> StorageDriverStoreContext: + return make_store_context( + target=StorageDriverWorkflowInfo( + id=workflow_id, + type=workflow_type, + run_id=run_id, + namespace=namespace, + ) + ) + + +def make_activity_context( + namespace: str = "my-namespace", + activity_id: str | None = "my-activity", + activity_type: str | None = None, + run_id: str | None = None, +) -> StorageDriverStoreContext: + return make_store_context( + target=StorageDriverActivityInfo( + id=activity_id, + type=activity_type, + run_id=run_id, + namespace=namespace, + ) + ) + + +async def _list_keys(redis_asyncio_client: Any) -> list[str]: + raw_keys = await redis_asyncio_client.keys("*") + return sorted(key.decode() if isinstance(key, bytes) else key for key in raw_keys) + + +async def _wait_for_key_absent(redis_asyncio_client: Any, key: str) -> None: + async def _poll() -> None: + while await redis_asyncio_client.exists(key): + await asyncio.sleep(0.01) + + await asyncio.wait_for(_poll(), timeout=2) + + +class CountingDriverClient(RedisStorageDriverClient): + """RedisStorageDriverClient wrapper that counts calls.""" + + def __init__(self, delegate: RedisStorageDriverClient) -> None: + self._delegate = delegate + self.get_count = 0 + self.set_if_absent_count = 0 + self.insert_count = 0 + + async def get(self, *, key: str) -> bytes | None: + self.get_count += 1 + return await self._delegate.get(key=key) + + async def set_if_absent( + self, + *, + key: str, + data: bytes, + ttl: timedelta | None = None, + ) -> bool: + self.set_if_absent_count += 1 + inserted = await self._delegate.set_if_absent(key=key, data=data, ttl=ttl) + if inserted: + self.insert_count += 1 + return inserted + + +class FailOnceDriverClient(RedisStorageDriverClient): + """RedisStorageDriverClient wrapper that fails one call then blocks.""" + + def __init__(self, delegate: RedisStorageDriverClient, fail_on: str) -> 
None: + self._delegate = delegate + self._fail_on = fail_on + self._call_count = 0 + self.cancelled: list[bool] = [] + + async def _maybe_fail(self) -> None: + self._call_count += 1 + if self._call_count == 1: + raise ConnectionError("Redis connection lost") + try: + await asyncio.sleep(60) + except asyncio.CancelledError: + self.cancelled.append(True) + raise + + async def get(self, *, key: str) -> bytes | None: + if self._fail_on == "get": + await self._maybe_fail() + return await self._delegate.get(key=key) + + async def set_if_absent( + self, + *, + key: str, + data: bytes, + ttl: timedelta | None = None, + ) -> bool: + if self._fail_on == "set_if_absent": + await self._maybe_fail() + return await self._delegate.set_if_absent(key=key, data=data, ttl=ttl) + + +class _AsyncBarrier: + """Minimal asyncio.Barrier equivalent for Python <3.11.""" + + def __init__(self, parties: int) -> None: + self._parties = parties + self._count = 0 + self._event = asyncio.Event() + + async def wait(self) -> None: + self._count += 1 + if self._count >= self._parties: + self._event.set() + else: + await self._event.wait() + + +def _barrier_wrapper( + fn: Callable[..., Coroutine[Any, Any, Any]], barrier: _AsyncBarrier +) -> Callable[..., Coroutine[Any, Any, Any]]: + """Wrap an async method to wait at a barrier before proceeding.""" + + @wraps(fn) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + await asyncio.wait_for(barrier.wait(), timeout=5) + return await fn(*args, **kwargs) + + return wrapper + + +@pytest.fixture +def counting_driver_client( + driver_client: RedisStorageDriverClient, +) -> CountingDriverClient: + """Wrap the driver client in a counting decorator.""" + return CountingDriverClient(driver_client) + + +class TestRedisStorageDriverInit: + def test_default_name(self) -> None: + driver = RedisStorageDriver(client=MagicMock(spec=RedisStorageDriverClient)) + assert driver.name() == "redis" + + def test_custom_name(self) -> None: + driver = RedisStorageDriver( + 
client=MagicMock(spec=RedisStorageDriverClient), + driver_name="my-redis", + ) + assert driver.name() == "my-redis" + + def test_type(self) -> None: + driver = RedisStorageDriver(client=MagicMock(spec=RedisStorageDriverClient)) + assert driver.type() == "redis" + + def test_ttl_zero_raises(self) -> None: + with pytest.raises(ValueError, match="ttl must be greater than zero"): + RedisStorageDriver( + client=MagicMock(spec=RedisStorageDriverClient), + ttl=timedelta(0), + ) + + def test_ttl_negative_raises(self) -> None: + with pytest.raises(ValueError, match="ttl must be greater than zero"): + RedisStorageDriver( + client=MagicMock(spec=RedisStorageDriverClient), + ttl=timedelta(seconds=-1), + ) + + def test_max_payload_size_zero_raises(self) -> None: + with pytest.raises( + ValueError, match="max_payload_size must be greater than zero" + ): + RedisStorageDriver( + client=MagicMock(spec=RedisStorageDriverClient), + max_payload_size=0, + ) + + def test_max_payload_size_negative_raises(self) -> None: + with pytest.raises( + ValueError, match="max_payload_size must be greater than zero" + ): + RedisStorageDriver( + client=MagicMock(spec=RedisStorageDriverClient), + max_payload_size=-1, + ) + + +class TestRedisStorageDriverKeyConstruction: + async def test_key_context_none( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + [claim] = await driver.store(make_store_context(), [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert claim.claim_data["key"] == f"{KEY_PREFIX}:v0:d:sha256:{expected_hash}" + + async def test_key_context_workflow( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + ctx = make_workflow_context(namespace="ns1", workflow_id="wf1") + [claim] = await driver.store(ctx, [payload]) + 
expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"{KEY_PREFIX}:v0:ns:ns1:wt:null:wi:wf1:ri:null:d:sha256:{expected_hash}" + ) + + async def test_key_context_workflow_with_type_and_run_id( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + ctx = make_workflow_context( + namespace="ns1", + workflow_id="wf1", + workflow_type="MyWorkflow", + run_id="run-abc", + ) + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"{KEY_PREFIX}:v0:ns:ns1:wt:MyWorkflow:wi:wf1:ri:run-abc:d:sha256:{expected_hash}" + ) + + async def test_key_context_activity( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + ctx = make_activity_context(namespace="ns1", activity_id="act1") + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"{KEY_PREFIX}:v0:ns:ns1:at:null:ai:act1:ri:null:d:sha256:{expected_hash}" + ) + + async def test_key_context_activity_with_type_and_run_id( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + ctx = make_activity_context( + namespace="ns1", + activity_id="act1", + activity_type="MyActivity", + run_id="run-abc", + ) + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"{KEY_PREFIX}:v0:ns:ns1:at:MyActivity:ai:act1:ri:run-abc:d:sha256:{expected_hash}" + ) + + async def test_key_preserves_case( + self, driver_client: RedisStorageDriverClient + ) 
-> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + ctx = make_workflow_context(namespace="MyNamespace", workflow_id="MyWorkflow") + [claim] = await driver.store(ctx, [payload]) + key = claim.claim_data["key"] + assert "MyNamespace" in key + assert "MyWorkflow" in key + + async def test_key_urlencodes_workflow_id_with_slashes( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + ctx = make_workflow_context(namespace="ns1", workflow_id="order/123/v2") + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"{KEY_PREFIX}:v0:ns:ns1:wt:null:wi:order%2F123%2Fv2:ri:null:d:sha256:{expected_hash}" + ) + + async def test_key_urlencodes_workflow_id_with_special_chars( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + ctx = make_workflow_context(namespace="ns1", workflow_id="wf#1 &foo=bar") + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"{KEY_PREFIX}:v0:ns:ns1:wt:null:wi:wf%231%20%26foo%3Dbar:ri:null:d:sha256:{expected_hash}" + ) + + async def test_key_urlencodes_activity_id( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + ctx = make_activity_context(namespace="ns1", activity_id="act/1#2") + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"{KEY_PREFIX}:v0:ns:ns1:at:null:ai:act%2F1%232:ri:null:d:sha256:{expected_hash}" + ) + + async def 
test_key_urlencodes_namespace( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + ctx = make_workflow_context(namespace="my/ns#1", workflow_id="wf1") + [claim] = await driver.store(ctx, [payload]) + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert ( + claim.claim_data["key"] + == f"{KEY_PREFIX}:v0:ns:my%2Fns%231:wt:null:wi:wf1:ri:null:d:sha256:{expected_hash}" + ) + + async def test_key_urlencoded_roundtrip( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload("special-char-roundtrip") + ctx = make_workflow_context(namespace="ns/1", workflow_id="wf/2#3") + [claim] = await driver.store(ctx, [payload]) + [retrieved] = await driver.retrieve(StorageDriverRetrieveContext(), [claim]) + assert retrieved == payload + + +class TestRedisStorageDriverStoreRetrieve: + async def test_store_returns_claim_with_key_and_hash( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload() + [claim] = await driver.store(make_store_context(), [payload]) + assert claim.claim_data["key"].startswith(f"{KEY_PREFIX}:v0:") + assert claim.claim_data["hash_algorithm"] == "sha256" + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + assert claim.claim_data["hash_value"] == expected_hash + + async def test_roundtrip_single_payload( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload("round-trip value") + [claim] = await driver.store(make_store_context(), [payload]) + [retrieved] = await driver.retrieve(StorageDriverRetrieveContext(), [claim]) + assert retrieved == payload + + async def 
test_roundtrip_multiple_payloads( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payloads = [make_payload(f"value-{i}") for i in range(3)] + claims = await driver.store(make_store_context(), payloads) + retrieved = await driver.retrieve(StorageDriverRetrieveContext(), claims) + assert retrieved == payloads + + async def test_empty_payloads_returns_empty_list( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + assert await driver.store(make_store_context(), []) == [] + assert await driver.retrieve(StorageDriverRetrieveContext(), []) == [] + + async def test_content_addressable_deduplication( + self, redis_asyncio_client: Any, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload("same-value") + claims = await driver.store(make_store_context(), [payload, payload]) + assert claims[0].claim_data["key"] == claims[1].claim_data["key"] + assert await _list_keys(redis_asyncio_client) == [claims[0].claim_data["key"]] + + async def test_set_if_absent_only_inserts_once( + self, counting_driver_client: CountingDriverClient + ) -> None: + driver = RedisStorageDriver( + client=counting_driver_client, + key_prefix=KEY_PREFIX, + ) + payload = make_payload("insert-once") + + await driver.store(make_store_context(), [payload]) + await driver.store(make_store_context(), [payload]) + + assert counting_driver_client.set_if_absent_count == 2 + assert counting_driver_client.insert_count == 1 + + async def test_duplicate_store_preserves_data( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload("preserve-me") + + [claim1] = await driver.store(make_store_context(), [payload]) + [claim2] = await 
driver.store(make_store_context(), [payload]) + assert claim1 == claim2 + + [retrieved] = await driver.retrieve(StorageDriverRetrieveContext(), [claim2]) + assert retrieved == payload + + async def test_ttl_is_applied_on_first_write( + self, redis_asyncio_client: Any, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver( + client=driver_client, + key_prefix=KEY_PREFIX, + ttl=timedelta(seconds=5), + ) + [claim] = await driver.store(make_store_context(), [make_payload("ttl")]) + ttl_ms = await redis_asyncio_client.pttl(claim.claim_data["key"]) + assert 0 < ttl_ms <= 5000 + + async def test_ttl_is_not_refreshed_on_duplicate_write( + self, redis_asyncio_client: Any, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver( + client=driver_client, + key_prefix=KEY_PREFIX, + ttl=timedelta(seconds=5), + ) + payload = make_payload("ttl-stable") + [claim] = await driver.store(make_store_context(), [payload]) + first_ttl_ms = await redis_asyncio_client.pttl(claim.claim_data["key"]) + await asyncio.sleep(0.02) + await driver.store(make_store_context(), [payload]) + second_ttl_ms = await redis_asyncio_client.pttl(claim.claim_data["key"]) + assert 0 < second_ttl_ms < first_ttl_ms + + async def test_retrieve_validates_hash( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload("check-integrity") + [claim] = await driver.store(make_store_context(), [payload]) + + tampered_claim = StorageDriverClaim( + claim_data={ + **claim.claim_data, + "hash_value": "0" * 64, + }, + ) + with pytest.raises( + ValueError, + match=r"RedisStorageDriver integrity check failed \[key=.+\]: expected sha256:.+, got sha256:.+", + ): + await driver.retrieve(StorageDriverRetrieveContext(), [tampered_claim]) + + async def test_retrieve_rejects_unsupported_hash_algorithm( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = 
RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload("unsupported-algo") + [claim] = await driver.store(make_store_context(), [payload]) + + bad_claim = StorageDriverClaim( + claim_data={ + **claim.claim_data, + "hash_algorithm": "md5", + }, + ) + with pytest.raises( + ValueError, + match=r"RedisStorageDriver unsupported hash algorithm \[key=.+\]: expected sha256, got md5", + ): + await driver.retrieve(StorageDriverRetrieveContext(), [bad_claim]) + + async def test_retrieve_without_hash_in_claim( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payload = make_payload("no-hash-claim") + [claim] = await driver.store(make_store_context(), [payload]) + + legacy_claim = StorageDriverClaim( + claim_data={ + "key": claim.claim_data["key"], + }, + ) + [retrieved] = await driver.retrieve( + StorageDriverRetrieveContext(), [legacy_claim] + ) + assert retrieved == payload + + async def test_retrieve_missing_key_raises_non_retryable_application_error( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + claim = StorageDriverClaim( + claim_data={"key": f"{KEY_PREFIX}:v0:d:sha256:missing"} + ) + with pytest.raises(ApplicationError) as exc_info: + await driver.retrieve(StorageDriverRetrieveContext(), [claim]) + assert exc_info.value.message == ( + f"Payload not found for key '{KEY_PREFIX}:v0:d:sha256:missing'" + ) + assert exc_info.value.type == "PayloadNotFoundError" + assert exc_info.value.non_retryable is True + + async def test_expired_key_raises_non_retryable_application_error( + self, redis_asyncio_client: Any, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver( + client=driver_client, + key_prefix=KEY_PREFIX, + ttl=timedelta(milliseconds=10), + ) + [claim] = await driver.store(make_store_context(), [make_payload("expire-me")]) + await 
_wait_for_key_absent(redis_asyncio_client, claim.claim_data["key"]) + with pytest.raises(ApplicationError) as exc_info: + await driver.retrieve(StorageDriverRetrieveContext(), [claim]) + assert exc_info.value.type == "PayloadNotFoundError" + assert exc_info.value.non_retryable is True + + async def test_payload_exceeds_max_size_raises( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver( + client=driver_client, + key_prefix=KEY_PREFIX, + max_payload_size=10, + ) + with pytest.raises( + ValueError, + match=r"Payload size \d+ bytes exceeds the configured max_payload_size of 10 bytes", + ): + await driver.store(make_store_context(), [make_payload("exceeds-limit")]) + + async def test_payload_at_max_size_succeeds( + self, driver_client: RedisStorageDriverClient + ) -> None: + payload = make_payload("x") + driver = RedisStorageDriver( + client=driver_client, + key_prefix=KEY_PREFIX, + max_payload_size=len(payload.SerializeToString()), + ) + await driver.store(make_store_context(), [payload]) + + +class TestRedisAsyncioAdapter: + async def test_decode_responses_client_raises(self) -> None: + client = fakeredis.aioredis.FakeRedis(decode_responses=True) + try: + await client.set("adapter:text", b"abc") + adapter = new_redis_asyncio_client(client) + with pytest.raises(TypeError, match="decode_responses=False"): + await adapter.get(key="adapter:text") + finally: + aclose = getattr(client, "aclose", None) + if aclose is not None: + await aclose() + else: + await client.close() + + +class TestRedisStorageDriverErrors: + async def test_store_client_failure_raises( + self, driver_client: RedisStorageDriverClient + ) -> None: + faulty_client = FailOnceDriverClient( + delegate=driver_client, fail_on="set_if_absent" + ) + driver = RedisStorageDriver(client=faulty_client, key_prefix=KEY_PREFIX) + payload = make_payload() + expected_hash = hashlib.sha256(payload.SerializeToString()).hexdigest() + expected_key = 
f"{KEY_PREFIX}:v0:d:sha256:{expected_hash}" + with pytest.raises(RuntimeError) as exc_info: + await driver.store(make_store_context(), [payload]) + assert str(exc_info.value) == f"RedisStorageDriver store failed [key={expected_key}]" + assert isinstance(exc_info.value.__cause__, ConnectionError) + + async def test_retrieve_client_failure_raises( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + [claim] = await driver.store(make_store_context(), [make_payload("value")]) + + faulty_client = FailOnceDriverClient(delegate=driver_client, fail_on="get") + driver = RedisStorageDriver(client=faulty_client, key_prefix=KEY_PREFIX) + with pytest.raises(RuntimeError) as exc_info: + await driver.retrieve(StorageDriverRetrieveContext(), [claim]) + assert ( + str(exc_info.value) + == f"RedisStorageDriver retrieve failed [key={claim.claim_data['key']}]" + ) + assert isinstance(exc_info.value.__cause__, ConnectionError) + + +class TestRedisStorageDriverConcurrency: + async def test_store_payloads_concurrently( + self, driver_client: RedisStorageDriverClient + ) -> None: + num_payloads = 5 + barrier = _AsyncBarrier(num_payloads) + driver_client.set_if_absent = _barrier_wrapper( # type: ignore[method-assign] + driver_client.set_if_absent, + barrier, + ) + + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payloads = [make_payload(f"concurrent-store-{i}") for i in range(num_payloads)] + + claims = await driver.store(make_store_context(), payloads) + assert len(claims) == num_payloads + + async def test_retrieve_payloads_concurrently( + self, driver_client: RedisStorageDriverClient + ) -> None: + num_payloads = 5 + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payloads = [make_payload(f"concurrent-retrieve-{i}") for i in range(num_payloads)] + claims = await driver.store(make_store_context(), payloads) + + barrier = _AsyncBarrier(num_payloads) + 
driver_client.get = _barrier_wrapper(driver_client.get, barrier) # type: ignore[method-assign] + + retrieved = await driver.retrieve(StorageDriverRetrieveContext(), claims) + assert retrieved == payloads + + async def test_store_cancels_remaining_on_failure( + self, driver_client: RedisStorageDriverClient + ) -> None: + faulty_client = FailOnceDriverClient( + delegate=driver_client, + fail_on="set_if_absent", + ) + driver = RedisStorageDriver(client=faulty_client, key_prefix=KEY_PREFIX) + payloads = [make_payload(f"cancel-store-{i}") for i in range(3)] + + with pytest.raises( + RuntimeError, + match=r"RedisStorageDriver store failed \[key=.+\]", + ) as exc_info: + await driver.store(make_store_context(), payloads) + + assert isinstance(exc_info.value.__cause__, ConnectionError) + assert str(exc_info.value.__cause__) == "Redis connection lost" + assert ( + len(faulty_client.cancelled) == 2 + ), "Expected 2 remaining tasks to be cancelled" + + async def test_retrieve_cancels_remaining_on_failure( + self, driver_client: RedisStorageDriverClient + ) -> None: + driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) + payloads = [make_payload(f"cancel-retrieve-{i}") for i in range(3)] + claims = await driver.store(make_store_context(), payloads) + + faulty_client = FailOnceDriverClient(delegate=driver_client, fail_on="get") + driver = RedisStorageDriver(client=faulty_client, key_prefix=KEY_PREFIX) + + with pytest.raises( + RuntimeError, + match=r"RedisStorageDriver retrieve failed \[key=.+\]", + ) as exc_info: + await driver.retrieve(StorageDriverRetrieveContext(), claims) + + assert isinstance(exc_info.value.__cause__, ConnectionError) + assert str(exc_info.value.__cause__) == "Redis connection lost" + assert ( + len(faulty_client.cancelled) == 2 + ), "Expected 2 remaining tasks to be cancelled" diff --git a/tests/external_storage_redis/test_redis_worker.py b/tests/external_storage_redis/test_redis_worker.py new file mode 100644 index 
00000000..7c8d1a28 --- /dev/null +++ b/tests/external_storage_redis/test_redis_worker.py @@ -0,0 +1,528 @@ +"""Worker integration tests for RedisStorageDriver key structure.""" + +from __future__ import annotations + +import dataclasses +import hashlib +import uuid +from collections.abc import AsyncIterator, Callable, Sequence +from datetime import timedelta +from typing import Any + +import pytest +import pytest_asyncio + +import temporalio.converter +from temporalio.client import Client, WorkflowFailureError +from temporalio.converter import ExternalStorage, JSONPlainPayloadConverter +from temporalio.exceptions import ActivityError, ApplicationError +from temporalio.service import RPCError +from temporalio.testing import WorkflowEnvironment +from temporalio.worker import Worker + +from external_storage_redis import RedisStorageDriver, RedisStorageDriverClient +from external_storage_redis.redis_asyncio import new_redis_asyncio_client +from external_storage_redis.workflows import ( + LARGE, + LARGE_2, + ChildWorkflow, + DocumentIngestionWorkflow, + LargeIOWorkflow, + LargeOutputNoRetryWorkflow, + ModelTrainingWorkflow, + OrderFulfillmentWorkflow, + ParentWithChildWorkflow, + PaymentProcessingWorkflow, + SignalQueryUpdateWorkflow, + download_document, + extract_text, + index_document, + large_io_activity, + large_output_activity, +) +from tests.external_storage_redis.conftest import KEY_PREFIX + +_THRESHOLD = 256 + + +def new_worker( + client: Client, + *workflows: type, + activities: Sequence[Callable[..., Any]] = (), + task_queue: str | None = None, + **kwargs: Any, +) -> Worker: + return Worker( + client, + task_queue=task_queue or str(uuid.uuid4()), + workflows=workflows, + activities=activities, + **kwargs, + ) + + +class FailingSetClient(RedisStorageDriverClient): + """Redis client wrapper that fails all writes.""" + + def __init__(self, delegate: RedisStorageDriverClient) -> None: + self._delegate = delegate + + async def get(self, *, key: str) -> bytes | 
None: + return await self._delegate.get(key=key) + + async def set_if_absent( + self, + *, + key: str, + data: bytes, + ttl: timedelta | None = None, + ) -> bool: + del key, data, ttl + raise ConnectionError("Redis is unavailable") + + +@pytest_asyncio.fixture +async def tmprl_client( + env: WorkflowEnvironment, redis_asyncio_client: Any +) -> AsyncIterator[Client]: + """Temporal client wired with ExternalStorage backed by fake Redis.""" + driver = RedisStorageDriver( + client=new_redis_asyncio_client(redis_asyncio_client), + key_prefix=KEY_PREFIX, + ) + yield await Client.connect( + env.client.service_client.config.target_host, + namespace=env.client.namespace, + data_converter=dataclasses.replace( + temporalio.converter.default(), + external_storage=ExternalStorage( + drivers=[driver], + payload_size_threshold=_THRESHOLD, + ), + ), + ) + + +async def _list_keys(redis_asyncio_client: Any) -> list[str]: + raw_keys = await redis_asyncio_client.keys(f"{KEY_PREFIX}:*") + return sorted(key.decode() if isinstance(key, bytes) else key for key in raw_keys) + + +def _skip_if_standalone_activity_disabled(err: RPCError) -> None: + if "Standalone activity is disabled" in str(err): + pytest.skip( + "Standalone Activity is not supported by this Temporal dev server build" + ) + + +async def test_redis_driver_workflow_input_key( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, LargeIOWorkflow, activities=[large_io_activity] + ) as worker: + await tmprl_client.execute_workflow( + LargeIOWorkflow.run, + LARGE, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 2 + assert all( + f"{KEY_PREFIX}:v0:ns:default:wt:LargeIOWorkflow:wi:{workflow_id}:ri:" in key + for key in keys + ) + assert sum(1 for key in keys if ":ri:null:" in key) == 1 + assert sum(1 for key in keys if ":ri:null:" not 
in key) == 1 + + +async def test_redis_driver_workflow_output_key( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, LargeIOWorkflow, activities=[large_io_activity] + ) as worker: + result = await tmprl_client.execute_workflow( + LargeIOWorkflow.run, + "small", + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + assert result == LARGE + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 1 + assert ( + f"{KEY_PREFIX}:v0:ns:default:wt:LargeIOWorkflow:wi:{workflow_id}:ri:" + in keys[0] + ) + assert ":ri:null:" not in keys[0] + + +async def test_redis_driver_workflow_activity_input_key( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, LargeIOWorkflow, activities=[large_io_activity] + ) as worker: + await tmprl_client.execute_workflow( + LargeIOWorkflow.run, + LARGE, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 2 + assert all( + f"{KEY_PREFIX}:v0:ns:default:wt:LargeIOWorkflow:wi:{workflow_id}:ri:" in key + for key in keys + ) + assert all(":ai:" not in key for key in keys) + + +async def test_redis_driver_workflow_activity_output_key( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, LargeIOWorkflow, activities=[large_io_activity] + ) as worker: + await tmprl_client.execute_workflow( + LargeIOWorkflow.run, + "small", + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 1 + assert ( + f"{KEY_PREFIX}:v0:ns:default:wt:LargeIOWorkflow:wi:{workflow_id}:ri:" + in keys[0] + ) + assert ":ri:null:" not in keys[0] + + +async def 
test_redis_driver_standalone_activity_input_key( + env: WorkflowEnvironment, + tmprl_client: Client, + redis_asyncio_client: Any, +) -> None: + if env.supports_time_skipping: + pytest.skip( + "Java test server: https://github.com/temporalio/sdk-java/issues/2741" + ) + activity_id = str(uuid.uuid4()) + task_queue = str(uuid.uuid4()) + async with new_worker( + tmprl_client, activities=[large_io_activity], task_queue=task_queue + ): + try: + await tmprl_client.execute_activity( + large_io_activity, + LARGE, + id=activity_id, + task_queue=task_queue, + start_to_close_timeout=timedelta(seconds=5), + ) + except RPCError as err: + _skip_if_standalone_activity_disabled(err) + raise + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 2 + assert all( + f"{KEY_PREFIX}:v0:ns:default:at:large_io_activity:ai:{activity_id}:ri:" + in key + for key in keys + ) + assert all(":wt:" not in key for key in keys) + assert sum(1 for key in keys if ":ri:null:" in key) == 1 + assert sum(1 for key in keys if ":ri:null:" not in key) == 1 + + +async def test_redis_driver_standalone_activity_output_key( + env: WorkflowEnvironment, + tmprl_client: Client, + redis_asyncio_client: Any, +) -> None: + if env.supports_time_skipping: + pytest.skip( + "Java test server: https://github.com/temporalio/sdk-java/issues/2741" + ) + activity_id = str(uuid.uuid4()) + task_queue = str(uuid.uuid4()) + async with new_worker( + tmprl_client, activities=[large_output_activity], task_queue=task_queue + ): + try: + await tmprl_client.execute_activity( + large_output_activity, + id=activity_id, + task_queue=task_queue, + start_to_close_timeout=timedelta(seconds=5), + ) + except RPCError as err: + _skip_if_standalone_activity_disabled(err) + raise + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 1 + assert ( + f"{KEY_PREFIX}:v0:ns:default:at:large_output_activity:ai:{activity_id}:ri:" + in keys[0] + ) + assert ":ri:null:" not in keys[0] + assert ":wt:" not in keys[0] + + +async 
def test_redis_driver_signal_arg_key( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker(tmprl_client, SignalQueryUpdateWorkflow) as worker: + handle = await tmprl_client.start_workflow( + SignalQueryUpdateWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + await handle.signal(SignalQueryUpdateWorkflow.finish, LARGE) + await handle.result() + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 2 + assert any( + f"{KEY_PREFIX}:v0:ns:default:wt:null:wi:{workflow_id}:ri:null:" in key + for key in keys + ) + assert any( + f"{KEY_PREFIX}:v0:ns:default:wt:SignalQueryUpdateWorkflow:wi:{workflow_id}:" + in key + for key in keys + ) + + +async def test_redis_driver_query_result_key( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker(tmprl_client, SignalQueryUpdateWorkflow) as worker: + handle = await tmprl_client.start_workflow( + SignalQueryUpdateWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + result = await handle.query(SignalQueryUpdateWorkflow.get_value, LARGE) + assert result == LARGE + await handle.signal(SignalQueryUpdateWorkflow.finish, "done") + await handle.result() + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 2 + assert any( + f"{KEY_PREFIX}:v0:ns:default:wt:null:wi:{workflow_id}:ri:null:" in key + for key in keys + ) + assert any( + f"{KEY_PREFIX}:v0:ns:default:wt:SignalQueryUpdateWorkflow:wi:{workflow_id}:" + in key + for key in keys + ) + + +async def test_redis_driver_update_result_key( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker(tmprl_client, SignalQueryUpdateWorkflow) as worker: + handle = await tmprl_client.start_workflow( + SignalQueryUpdateWorkflow.run, + id=workflow_id, + 
task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + result = await handle.execute_update( + SignalQueryUpdateWorkflow.do_update, LARGE + ) + assert result == LARGE + await handle.signal(SignalQueryUpdateWorkflow.finish, "done") + await handle.result() + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 2 + assert any( + f"{KEY_PREFIX}:v0:ns:default:wt:null:wi:{workflow_id}:ri:null:" in key + for key in keys + ) + assert any( + f"{KEY_PREFIX}:v0:ns:default:wt:SignalQueryUpdateWorkflow:wi:{workflow_id}:" + in key + for key in keys + ) + + +async def test_redis_driver_child_workflow_input_key( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker( + tmprl_client, ParentWithChildWorkflow, ChildWorkflow + ) as worker: + await tmprl_client.execute_workflow( + ParentWithChildWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(redis_asyncio_client) + child_workflow_id = f"{workflow_id}-child" + assert len(keys) == 1 + assert ( + f"{KEY_PREFIX}:v0:ns:default:wt:ChildWorkflow:wi:{child_workflow_id}:ri:" + in keys[0] + ) + + +async def test_redis_driver_identifier_casing( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = f"MyWorkflow-{uuid.uuid4()}" + async with new_worker( + tmprl_client, LargeIOWorkflow, activities=[large_io_activity] + ) as worker: + await tmprl_client.execute_workflow( + LargeIOWorkflow.run, + LARGE, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 2 + assert all( + f"{KEY_PREFIX}:v0:ns:default:wt:LargeIOWorkflow:wi:{workflow_id}:ri:" in key + for key in keys + ) + + +async def test_redis_driver_content_dedup( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + async with 
new_worker( + tmprl_client, + DocumentIngestionWorkflow, + activities=[download_document, extract_text, index_document], + ) as worker: + await tmprl_client.execute_workflow( + DocumentIngestionWorkflow.run, + "doc-001", + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 2 + assert all( + f"{KEY_PREFIX}:v0:ns:default:wt:DocumentIngestionWorkflow:wi:{workflow_id}:ri:" + in key + for key in keys + ) + assert keys[0] != keys[1] + + +async def test_redis_driver_single_workflow_same_key_namespace( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + async with new_worker(tmprl_client, ModelTrainingWorkflow) as worker: + handle = await tmprl_client.start_workflow( + ModelTrainingWorkflow.run, + LARGE, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + metrics = await handle.execute_update( + ModelTrainingWorkflow.get_metrics, "checkpoint-1" + ) + assert metrics == LARGE_2 + await handle.signal(ModelTrainingWorkflow.apply_overrides, LARGE) + await handle.signal(ModelTrainingWorkflow.complete) + await handle.result() + keys = await _list_keys(redis_asyncio_client) + assert len(keys) == 4 + assert all(f":wi:{workflow_id}:" in key for key in keys) + + +async def test_redis_driver_parent_child_independent_key_namespaces( + tmprl_client: Client, redis_asyncio_client: Any +) -> None: + workflow_id = str(uuid.uuid4()) + payment_id = f"{workflow_id}-payment" + async with new_worker( + tmprl_client, OrderFulfillmentWorkflow, PaymentProcessingWorkflow + ) as worker: + await tmprl_client.execute_workflow( + OrderFulfillmentWorkflow.run, + LARGE, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + keys = await _list_keys(redis_asyncio_client) + parent_keys = [key for key in keys if f":wi:{workflow_id}:" in key] + child_keys = [key for 
key in keys if f":wi:{payment_id}:" in key] + assert len(parent_keys) == 3 + assert len(child_keys) == 1 + + +async def test_redis_store_failure_surfaces_in_workflow_history( + env: WorkflowEnvironment, driver_client: RedisStorageDriverClient +) -> None: + driver = RedisStorageDriver( + client=FailingSetClient(driver_client), + key_prefix=KEY_PREFIX, + ) + bad_client = await Client.connect( + env.client.service_client.config.target_host, + namespace=env.client.namespace, + data_converter=dataclasses.replace( + temporalio.converter.default(), + external_storage=ExternalStorage( + drivers=[driver], + payload_size_threshold=_THRESHOLD, + ), + ), + ) + workflow_id = str(uuid.uuid4()) + async with new_worker( + bad_client, LargeOutputNoRetryWorkflow, activities=[large_output_activity] + ) as worker: + with pytest.raises(WorkflowFailureError) as exc_info: + await bad_client.execute_workflow( + LargeOutputNoRetryWorkflow.run, + id=workflow_id, + task_queue=worker.task_queue, + execution_timeout=timedelta(seconds=5), + ) + + large_payload = JSONPlainPayloadConverter().to_payload(LARGE) + assert large_payload is not None + expected_hash = hashlib.sha256(large_payload.SerializeToString()).hexdigest() + + activity_error = exc_info.value.__cause__ + assert isinstance(activity_error, ActivityError) + app_error = activity_error.__cause__ + assert isinstance(app_error, ApplicationError) + assert app_error.type == "RuntimeError" + msg = app_error.message + assert "RedisStorageDriver store failed [key=" in msg + assert ( + f"{KEY_PREFIX}:v0:ns:default:wt:LargeOutputNoRetryWorkflow:wi:{workflow_id}:ri:" + in msg + ) + assert f":d:sha256:{expected_hash}]" in msg diff --git a/uv.lock b/uv.lock index dd39daa7..a1331f1b 100644 --- a/uv.lock +++ b/uv.lock @@ -12,7 +12,7 @@ resolution-markers = [ [[package]] name = "aiohappyeyeballs" version = "2.6.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, @@ -21,7 +21,7 @@ wheels = [ [[package]] name = "aiohttp" version = "3.12.14" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, { name = "aiosignal" }, @@ -107,7 +107,7 @@ wheels = [ [[package]] name = "aiosignal" version = "1.4.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "frozenlist" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, @@ -120,7 +120,7 @@ wheels = [ [[package]] name = "annotated-types" version = "0.7.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, @@ -129,7 +129,7 @@ wheels = [ [[package]] name = "anyio" version = 
"4.9.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "idna" }, @@ -144,7 +144,7 @@ wheels = [ [[package]] name = "async-timeout" version = "4.0.3" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/87/d6/21b30a550dafea84b1b8eee21b5e23fa16d010ae006011221f33dcd8d7f8/async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", size = 8345, upload-time = "2023-08-10T16:35:56.907Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/fa/e01228c2938de91d47b307831c62ab9e4001e747789d0b05baf779a6488c/async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028", size = 5721, upload-time = "2023-08-10T16:35:55.203Z" }, @@ -153,7 +153,7 @@ wheels = [ [[package]] name = "attrs" version = "25.3.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, @@ -162,7 +162,7 @@ wheels = [ [[package]] name = "boto3" version = "1.39.4" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, { name = 
"jmespath" }, @@ -176,7 +176,7 @@ wheels = [ [[package]] name = "botocore" version = "1.39.4" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath" }, { name = "python-dateutil" }, @@ -190,7 +190,7 @@ wheels = [ [[package]] name = "certifi" version = "2025.7.9" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/de/8a/c729b6b60c66a38f590c4e774decc4b2ec7b0576be8f1aa984a53ffa812a/certifi-2025.7.9.tar.gz", hash = "sha256:c1d2ec05395148ee10cf672ffc28cd37ea0ab0d99f9cc74c43e588cbd111b079", size = 160386, upload-time = "2025-07-09T02:13:58.874Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/66/f3/80a3f974c8b535d394ff960a11ac20368e06b736da395b551a49ce950cce/certifi-2025.7.9-py3-none-any.whl", hash = "sha256:d842783a14f8fdd646895ac26f719a061408834473cfc10203f6a575beb15d39", size = 159230, upload-time = "2025-07-09T02:13:57.007Z" }, @@ -199,7 +199,7 @@ wheels = [ [[package]] name = "cffi" version = "1.17.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser" }, ] @@ -256,7 +256,7 @@ wheels = [ [[package]] name = "charset-normalizer" version = "3.4.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, @@ -317,7 +317,7 @@ wheels = [ [[package]] name = "click" version = "8.2.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] @@ -329,7 +329,7 @@ wheels = [ [[package]] name = "colorama" version = "0.4.6" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, @@ -338,7 +338,7 @@ wheels = [ [[package]] name = "cryptography" version = "38.0.4" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi" }, ] @@ -361,7 +361,7 @@ wheels = [ [[package]] name = "dacite" version = "1.9.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/55/a0/7ca79796e799a3e782045d29bf052b5cde7439a2bbb17f15ff44f7aacc63/dacite-1.9.2.tar.gz", hash = "sha256:6ccc3b299727c7aa17582f0021f6ae14d5de47c7227932c47fec4cdfefd26f09", size = 22420, upload-time = "2025-02-05T09:27:29.757Z" } wheels = [ { url = 
"https://files.pythonhosted.org/packages/94/35/386550fd60316d1e37eccdda609b074113298f23cef5bddb2049823fe666/dacite-1.9.2-py3-none-any.whl", hash = "sha256:053f7c3f5128ca2e9aceb66892b1a3c8936d02c686e707bee96e19deef4bc4a0", size = 16600, upload-time = "2025-02-05T09:27:24.345Z" }, @@ -370,7 +370,7 @@ wheels = [ [[package]] name = "dataclasses-json" version = "0.6.7" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "marshmallow" }, { name = "typing-inspect" }, @@ -383,7 +383,7 @@ wheels = [ [[package]] name = "distro" version = "1.9.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, @@ -392,7 +392,7 @@ wheels = [ [[package]] name = "exceptiongroup" version = "1.3.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] @@ -401,10 +401,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, ] +[[package]] +name = "fakeredis" +version = "2.34.1" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "redis" }, + { name = "sortedcontainers" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/11/40/fd09efa66205eb32253d2b2ebc63537281384d2040f0a88bcd2289e120e4/fakeredis-2.34.1.tar.gz", hash = "sha256:4ff55606982972eecce3ab410e03d746c11fe5deda6381d913641fbd8865ea9b", size = 177315, upload-time = "2026-02-25T13:17:51.315Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/b5/82f89307d0d769cd9bf46a54fb9136be08e4e57c5570ae421db4c9a2ba62/fakeredis-2.34.1-py3-none-any.whl", hash = "sha256:0107ec99d48913e7eec2a5e3e2403d1bd5f8aa6489d1a634571b975289c48f12", size = 122160, upload-time = "2026-02-25T13:17:49.701Z" }, +] + [[package]] name = "fastapi" version = "0.116.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, @@ -418,7 +432,7 @@ wheels = [ [[package]] name = "filelock" version = "3.18.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, @@ -427,7 +441,7 @@ wheels = [ [[package]] name = "frozenlist" version = "1.7.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/af/36/0da0a49409f6b47cc2d060dc8c9040b897b5902a8a4e37d9bc1deb11f680/frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a", size = 81304, upload-time = "2025-06-09T22:59:46.226Z" }, @@ -521,7 +535,7 @@ wheels = [ [[package]] name = "fsspec" version = "2025.7.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/8b/02/0835e6ab9cfc03916fe3f78c0956cfcdb6ff2669ffa6651065d5ebf7fc98/fsspec-2025.7.0.tar.gz", hash = "sha256:786120687ffa54b8283d942929540d8bc5ccfa820deb555a2b5d0ed2b737bf58", size = 304432, upload-time = "2025-07-15T16:05:21.19Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/2f/e0/014d5d9d7a4564cf1c40b5039bc882db69fd881111e03ab3657ac0b218e2/fsspec-2025.7.0-py3-none-any.whl", hash = "sha256:8b012e39f63c7d5f10474de957f3ab793b47b45ae7d39f2fb735f8bbe25c0e21", size = 199597, upload-time = "2025-07-15T16:05:19.529Z" }, @@ -530,7 +544,7 @@ wheels = [ [[package]] name = "gevent" version = "25.9.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation == 'CPython' and sys_platform == 'win32'" }, { name = "greenlet", marker = "platform_python_implementation == 'CPython'" }, @@ -582,7 +596,7 @@ wheels = [ [[package]] name = "googleapis-common-protos" version = "1.70.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] @@ 
-594,7 +608,7 @@ wheels = [ [[package]] name = "greenlet" version = "3.2.3" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/92/db/b4c12cff13ebac2786f4f217f06588bccd8b53d260453404ef22b121fc3a/greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be", size = 268977, upload-time = "2025-06-05T16:10:24.001Z" }, @@ -645,7 +659,7 @@ wheels = [ [[package]] name = "griffe" version = "1.7.3" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama" }, ] @@ -657,7 +671,7 @@ wheels = [ [[package]] name = "grpcio" version = "1.76.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] @@ -718,7 +732,7 @@ wheels = [ [[package]] name = "h11" version = "0.16.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = 
"2025-04-24T03:35:24.344Z" }, @@ -727,7 +741,7 @@ wheels = [ [[package]] name = "hf-xet" version = "1.1.5" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ed/d4/7685999e85945ed0d7f0762b686ae7015035390de1161dcea9d5276c134c/hf_xet-1.1.5.tar.gz", hash = "sha256:69ebbcfd9ec44fdc2af73441619eeb06b94ee34511bbcf57cd423820090f5694", size = 495969, upload-time = "2025-06-20T21:48:38.007Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/00/89/a1119eebe2836cb25758e7661d6410d3eae982e2b5e974bcc4d250be9012/hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f52c2fa3635b8c37c7764d8796dfa72706cc4eded19d638331161e82b0792e23", size = 2687929, upload-time = "2025-06-20T21:48:32.284Z" }, @@ -742,7 +756,7 @@ wheels = [ [[package]] name = "httpcore" version = "1.0.9" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "h11" }, @@ -755,7 +769,7 @@ wheels = [ [[package]] name = "httptools" version = "0.6.4" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/3b/6f/972f8eb0ea7d98a1c6be436e2142d51ad2a64ee18e02b0e7ff1f62171ab1/httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0", size = 198780, upload-time = "2024-10-16T19:44:06.882Z" }, @@ -791,7 +805,7 @@ wheels = [ [[package]] name = "httpx" version = "0.28.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry 
= "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "certifi" }, @@ -806,7 +820,7 @@ wheels = [ [[package]] name = "httpx-sse" version = "0.4.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, @@ -815,7 +829,7 @@ wheels = [ [[package]] name = "huggingface-hub" version = "0.34.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, { name = "fsspec" }, @@ -834,7 +848,7 @@ wheels = [ [[package]] name = "idna" version = "3.10" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, @@ -843,7 +857,7 @@ wheels = [ [[package]] name = "importlib-metadata" version = "8.7.0" -source = { registry = 
"https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] @@ -855,7 +869,7 @@ wheels = [ [[package]] name = "iniconfig" version = "2.1.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, @@ -864,7 +878,7 @@ wheels = [ [[package]] name = "jinja2" version = "3.1.6" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] @@ -876,7 +890,7 @@ wheels = [ [[package]] name = "jiter" version = "0.10.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759, upload-time = "2025-05-18T19:04:59.73Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/be/7e/4011b5c77bec97cb2b572f566220364e3e21b51c48c5bd9c4a9c26b41b67/jiter-0.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cd2fb72b02478f06a900a5782de2ef47e0396b3e1f7d5aba30daeb1fce66f303", size = 317215, upload-time = "2025-05-18T19:03:04.303Z" }, @@ -948,7 +962,7 @@ wheels = [ [[package]] name = "jmespath" version = "1.0.1" -source = { registry = 
"https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, @@ -957,7 +971,7 @@ wheels = [ [[package]] name = "jsonpatch" version = "1.33" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jsonpointer" }, ] @@ -969,7 +983,7 @@ wheels = [ [[package]] name = "jsonpointer" version = "3.0.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, @@ -978,7 +992,7 @@ wheels = [ [[package]] name = "jsonschema" version = "4.24.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "jsonschema-specifications" }, @@ -993,7 +1007,7 @@ wheels = [ [[package]] name = 
"jsonschema-specifications" version = "2025.4.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "referencing" }, ] @@ -1005,7 +1019,7 @@ wheels = [ [[package]] name = "langchain" version = "0.1.20" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, { name = "async-timeout", marker = "python_full_version < '3.11'" }, @@ -1029,7 +1043,7 @@ wheels = [ [[package]] name = "langchain-community" version = "0.0.38" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, { name = "dataclasses-json" }, @@ -1049,7 +1063,7 @@ wheels = [ [[package]] name = "langchain-core" version = "0.1.53" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jsonpatch" }, { name = "langsmith" }, @@ -1066,7 +1080,7 @@ wheels = [ [[package]] name = "langchain-openai" version = "0.0.6" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "numpy" }, @@ -1081,7 +1095,7 @@ wheels = [ [[package]] name = "langchain-text-splitters" version = "0.0.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, ] @@ -1093,7 +1107,7 @@ wheels = [ [[package]] name = "langsmith" version = "0.1.147" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, { name = "orjson", marker = "platform_python_implementation != 'PyPy'" }, @@ -1109,7 +1123,7 @@ wheels = [ [[package]] name = "litellm" version = "1.74.8" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" 
} dependencies = [ { name = "aiohttp" }, { name = "click" }, @@ -1131,7 +1145,7 @@ wheels = [ [[package]] name = "markdown-it-py" version = "3.0.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] @@ -1143,7 +1157,7 @@ wheels = [ [[package]] name = "markupsafe" version = "3.0.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, @@ -1201,7 +1215,7 @@ wheels = [ [[package]] name = "marshmallow" version = "3.26.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] @@ -1213,7 +1227,7 @@ wheels = [ [[package]] name = "mcp" version = "1.11.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "httpx" }, @@ -1235,7 +1249,7 @@ wheels = [ [[package]] name = "mdurl" version = "0.1.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = 
"2022-08-14T12:40:10.846Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, @@ -1244,7 +1258,7 @@ wheels = [ [[package]] name = "multidict" version = "6.6.3" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] @@ -1346,7 +1360,7 @@ wheels = [ [[package]] name = "mypy" version = "1.16.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions" }, { name = "pathspec" }, @@ -1385,7 +1399,7 @@ wheels = [ [[package]] name = "mypy-extensions" version = "1.1.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, @@ -1394,7 +1408,7 @@ wheels = [ [[package]] name = "nexus-rpc" version = "1.3.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] @@ -1406,7 +1420,7 @@ wheels = [ [[package]] name = "nodeenv" version = "1.9.1" -source = { registry = 
"https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, @@ -1415,7 +1429,7 @@ wheels = [ [[package]] name = "numpy" version = "1.26.4" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468, upload-time = "2024-02-05T23:48:01.194Z" }, @@ -1447,7 +1461,7 @@ wheels = [ [[package]] name = "openai" version = "1.108.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "distro" }, @@ -1466,7 +1480,7 @@ wheels = [ [[package]] name = "openai-agents" version = "0.3.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "griffe" }, { name = "mcp" }, @@ -1489,7 +1503,7 @@ litellm = [ [[package]] name = 
"opentelemetry-api" version = "1.35.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "importlib-metadata" }, { name = "typing-extensions" }, @@ -1502,7 +1516,7 @@ wheels = [ [[package]] name = "opentelemetry-exporter-otlp-proto-common" version = "1.35.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-proto" }, ] @@ -1514,7 +1528,7 @@ wheels = [ [[package]] name = "opentelemetry-exporter-otlp-proto-grpc" version = "1.35.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "googleapis-common-protos" }, { name = "grpcio" }, @@ -1532,7 +1546,7 @@ wheels = [ [[package]] name = "opentelemetry-proto" version = "1.35.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] @@ -1544,7 +1558,7 @@ wheels = [ [[package]] name = "opentelemetry-sdk" version = "1.35.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-semantic-conventions" }, @@ -1558,7 +1572,7 @@ wheels = [ [[package]] name = "opentelemetry-semantic-conventions" version = "0.56b0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "typing-extensions" }, @@ -1571,7 +1585,7 @@ wheels = [ [[package]] name = "orjson" version = "3.11.4" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = 
"sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188, upload-time = "2025-10-24T15:50:38.027Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/e0/30/5aed63d5af1c8b02fbd2a8d83e2a6c8455e30504c50dbf08c8b51403d873/orjson-3.11.4-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e3aa2118a3ece0d25489cbe48498de8a5d580e42e8d9979f65bf47900a15aba1", size = 243870, upload-time = "2025-10-24T15:48:28.908Z" }, @@ -1652,7 +1666,7 @@ wheels = [ [[package]] name = "outcome" version = "1.3.0.post0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, ] @@ -1664,7 +1678,7 @@ wheels = [ [[package]] name = "packaging" version = "23.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/fb/2b/9b9c33ffed44ee921d0967086d653047286054117d584f1b1a7c22ceaf7b/packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", size = 146714, upload-time = "2023-10-01T13:50:05.279Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7", size = 53011, upload-time = "2023-10-01T13:50:03.745Z" }, @@ -1673,7 +1687,7 @@ wheels = [ [[package]] name = "pandas" version = "2.3.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, { name = "python-dateutil" }, @@ -1721,7 +1735,7 @@ wheels = [ [[package]] name = "pastel" version = "0.2.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/76/f1/4594f5e0fcddb6953e5b8fe00da8c317b8b41b547e2b3ae2da7512943c62/pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d", size = 7555, upload-time = "2020-09-16T19:21:12.43Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/aa/18/a8444036c6dd65ba3624c63b734d3ba95ba63ace513078e1580590075d21/pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364", size = 5955, upload-time = "2020-09-16T19:21:11.409Z" }, @@ -1730,7 +1744,7 @@ wheels = [ [[package]] name = "pathspec" version = "0.12.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, @@ -1739,7 +1753,7 @@ wheels = [ [[package]] name = "pluggy" version = "1.6.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = 
"sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, @@ -1748,7 +1762,7 @@ wheels = [ [[package]] name = "poethepoet" version = "0.36.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pastel" }, { name = "pyyaml" }, @@ -1762,7 +1776,7 @@ wheels = [ [[package]] name = "propcache" version = "0.3.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/ab/14/510deed325e262afeb8b360043c5d7c960da7d3ecd6d6f9496c9c56dc7f4/propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770", size = 73178, upload-time = "2025-06-09T22:53:40.126Z" }, @@ -1851,7 +1865,7 @@ wheels = [ [[package]] name = "protobuf" version = "5.29.5" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226, upload-time = "2025-05-28T23:51:59.82Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963, upload-time = "2025-05-28T23:51:41.204Z" }, @@ -1865,7 +1879,7 @@ wheels = [ 
[[package]] name = "pyarrow" version = "22.0.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151, upload-time = "2025-10-24T12:30:00.762Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/d9/9b/cb3f7e0a345353def531ca879053e9ef6b9f38ed91aebcf68b09ba54dec0/pyarrow-22.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:77718810bd3066158db1e95a63c160ad7ce08c6b0710bc656055033e39cdad88", size = 34223968, upload-time = "2025-10-24T10:03:31.21Z" }, @@ -1922,7 +1936,7 @@ wheels = [ [[package]] name = "pycparser" version = "2.22" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, @@ -1931,7 +1945,7 @@ wheels = [ [[package]] name = "pydantic" version = "2.12.5" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, { name = "pydantic-core" }, @@ -1946,7 +1960,7 @@ wheels = [ [[package]] name = "pydantic-core" version = "2.41.5" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = 
[ { name = "typing-extensions" }, ] @@ -2064,7 +2078,7 @@ wheels = [ [[package]] name = "pydantic-settings" version = "2.10.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, @@ -2078,7 +2092,7 @@ wheels = [ [[package]] name = "pygments" version = "2.19.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, @@ -2087,7 +2101,7 @@ wheels = [ [[package]] name = "pyright" version = "1.1.403" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodeenv" }, { name = "typing-extensions" }, @@ -2100,7 +2114,7 @@ wheels = [ [[package]] name = "pytest" version = "7.4.4" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, @@ -2117,7 +2131,7 @@ wheels = [ [[package]] name = "pytest-asyncio" version = "0.18.3" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] @@ -2130,7 +2144,7 @@ wheels = [ [[package]] name = "pytest-pretty" version = "1.3.0" -source = { registry 
= "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, { name = "rich" }, @@ -2143,7 +2157,7 @@ wheels = [ [[package]] name = "python-dateutil" version = "2.9.0.post0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] @@ -2155,7 +2169,7 @@ wheels = [ [[package]] name = "python-dotenv" version = "1.1.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, @@ -2164,7 +2178,7 @@ wheels = [ [[package]] name = "python-multipart" version = "0.0.20" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, @@ -2173,7 +2187,7 @@ wheels = [ 
[[package]] name = "pytz" version = "2025.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, @@ -2182,7 +2196,7 @@ wheels = [ [[package]] name = "pywin32" version = "311" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } wheels = [ { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, @@ -2204,7 +2218,7 @@ wheels = [ [[package]] name = "pyyaml" version = "6.0.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } wheels = [ { 
url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, @@ -2245,10 +2259,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, ] +[[package]] +name = "redis" +version = "7.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7b/7f/3759b1d0d72b7c92f0d70ffd9dc962b7b7b5ee74e135f9d7d8ab06b8a318/redis-7.4.0.tar.gz", hash = "sha256:64a6ea7bf567ad43c964d2c30d82853f8df927c5c9017766c55a1d1ed95d18ad", size = 4943913, upload-time = "2026-03-24T09:14:37.53Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/3a/95deec7db1eb53979973ebd156f3369a72732208d1391cd2e5d127062a32/redis-7.4.0-py3-none-any.whl", hash = "sha256:a9c74a5c893a5ef8455a5adb793a31bb70feb821c86eccb62eebef5a19c429ec", size = 409772, upload-time = "2026-03-24T09:14:35.968Z" }, +] + [[package]] name = "referencing" version = "0.36.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "rpds-py" }, @@ -2262,7 +2288,7 @@ wheels = [ [[package]] name = "regex" version = "2024.11.6" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = 
"sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494, upload-time = "2024-11-06T20:12:31.635Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/95/3c/4651f6b130c6842a8f3df82461a8950f923925db8b6961063e82744bddcc/regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91", size = 482674, upload-time = "2024-11-06T20:08:57.575Z" }, @@ -2331,7 +2357,7 @@ wheels = [ [[package]] name = "requests" version = "2.32.4" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "charset-normalizer" }, @@ -2346,7 +2372,7 @@ wheels = [ [[package]] name = "requests-toolbelt" version = "1.0.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "requests" }, ] @@ -2358,7 +2384,7 @@ wheels = [ [[package]] name = "rich" version = "14.0.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, @@ -2372,7 +2398,7 @@ wheels = [ [[package]] name = "rpds-py" version = "0.26.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385, upload-time = "2025-07-01T15:57:13.958Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/b9/31/1459645f036c3dfeacef89e8e5825e430c77dde8489f3b99eaafcd4a60f5/rpds_py-0.26.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4c70c70f9169692b36307a95f3d8c0a9fcd79f7b4a383aad5eaa0e9718b79b37", size = 372466, upload-time = "2025-07-01T15:53:40.55Z" }, @@ 
-2498,7 +2524,7 @@ wheels = [ [[package]] name = "ruff" version = "0.5.7" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/bf/2b/69e5e412f9d390adbdbcbf4f64d6914fa61b44b08839a6584655014fc524/ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5", size = 2449817, upload-time = "2024-08-08T15:43:07.467Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/6b/eb/06e06aaf96af30a68e83b357b037008c54a2ddcbad4f989535007c700394/ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a", size = 9570571, upload-time = "2024-08-08T15:41:56.537Z" }, @@ -2523,7 +2549,7 @@ wheels = [ [[package]] name = "s3transfer" version = "0.13.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, ] @@ -2535,7 +2561,7 @@ wheels = [ [[package]] name = "sentry-sdk" version = "2.34.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "urllib3" }, @@ -2548,7 +2574,7 @@ wheels = [ [[package]] name = "setuptools" version = "80.9.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time 
= "2025-05-27T00:56:49.664Z" }, @@ -2557,7 +2583,7 @@ wheels = [ [[package]] name = "six" version = "1.17.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, @@ -2566,7 +2592,7 @@ wheels = [ [[package]] name = "sniffio" version = "1.3.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, @@ -2575,7 +2601,7 @@ wheels = [ [[package]] name = "sortedcontainers" version = "2.4.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size 
= 30594, upload-time = "2021-05-16T22:03:42.897Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, @@ -2584,7 +2610,7 @@ wheels = [ [[package]] name = "sqlalchemy" version = "2.0.41" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, { name = "typing-extensions" }, @@ -2629,7 +2655,7 @@ wheels = [ [[package]] name = "sse-starlette" version = "2.4.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] @@ -2641,7 +2667,7 @@ wheels = [ [[package]] name = "starlette" version = "0.47.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, @@ -2654,7 +2680,7 @@ wheels = [ [[package]] name = "temporalio" version = "1.23.0" -source = { registry = "https://test.pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nexus-rpc" }, { name = "protobuf" }, @@ -2662,13 +2688,13 @@ dependencies = [ { name = "types-protobuf" }, { name = "typing-extensions" }, ] -sdist = { url 
= "https://test-files.pythonhosted.org/packages/67/48/ba7413e2fab8dcd277b9df00bafa572da24e9ca32de2f38d428dc3a2825c/temporalio-1.23.0.tar.gz", hash = "sha256:72750494b00eb73ded9db76195e3a9b53ff548780f73d878ec3f807ee3191410", size = 1933051, upload-time = "2026-02-18T17:40:03.902Z" } +sdist = { url = "https://files.pythonhosted.org/packages/67/48/ba7413e2fab8dcd277b9df00bafa572da24e9ca32de2f38d428dc3a2825c/temporalio-1.23.0.tar.gz", hash = "sha256:72750494b00eb73ded9db76195e3a9b53ff548780f73d878ec3f807ee3191410", size = 1933051, upload-time = "2026-02-18T17:48:22.353Z" } wheels = [ - { url = "https://test-files.pythonhosted.org/packages/6f/71/26c8f21dca9092201b3b9cb7aff42460b4864b5999aa4c6a4343ac66f1fd/temporalio-1.23.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6b69ac8d75f2d90e66f4edce4316f6a33badc4a30b22efc50e9eddaa9acdc216", size = 12311037, upload-time = "2026-02-18T17:39:27.941Z" }, - { url = "https://test-files.pythonhosted.org/packages/ec/47/43102816139f2d346680cb7cc1e53da5f6968355ac65b4d35d4edbfca896/temporalio-1.23.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:1bbbb2f9c3cdd09451565163f6d741e51f109694c49435d475fdfa42b597219d", size = 11821906, upload-time = "2026-02-18T17:39:35.343Z" }, - { url = "https://test-files.pythonhosted.org/packages/00/b0/899ff28464a0e17adf17476bdfac8faf4ea41870358ff2d14737e43f9e66/temporalio-1.23.0-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf6570e0ee696f99a38d855da4441a890c7187357c16505ed458ac9ef274ed70", size = 12063601, upload-time = "2026-02-18T17:39:43.299Z" }, - { url = "https://test-files.pythonhosted.org/packages/ed/17/b8c6d2ec3e113c6a788322513a5ff635bdd54b3791d092ed0e273467748a/temporalio-1.23.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b82d6cca54c9f376b50e941dd10d12f7fe5b692a314fb087be72cd2898646a79", size = 12394579, upload-time = "2026-02-18T17:39:52.935Z" }, - { url = 
"https://test-files.pythonhosted.org/packages/b4/b7/f9ef7fd5ee65aef7d59ab1e95cb1b45df2fe49c17e3aa4d650ae3322f015/temporalio-1.23.0-cp310-abi3-win_amd64.whl", hash = "sha256:43c3b99a46dd329761a256f3855710c4a5b322afc879785e468bdd0b94faace6", size = 12834494, upload-time = "2026-02-18T17:40:00.858Z" }, + { url = "https://files.pythonhosted.org/packages/6f/71/26c8f21dca9092201b3b9cb7aff42460b4864b5999aa4c6a4343ac66f1fd/temporalio-1.23.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6b69ac8d75f2d90e66f4edce4316f6a33badc4a30b22efc50e9eddaa9acdc216", size = 12311037, upload-time = "2026-02-18T17:47:47.628Z" }, + { url = "https://files.pythonhosted.org/packages/ec/47/43102816139f2d346680cb7cc1e53da5f6968355ac65b4d35d4edbfca896/temporalio-1.23.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:1bbbb2f9c3cdd09451565163f6d741e51f109694c49435d475fdfa42b597219d", size = 11821906, upload-time = "2026-02-18T17:47:55.314Z" }, + { url = "https://files.pythonhosted.org/packages/00/b0/899ff28464a0e17adf17476bdfac8faf4ea41870358ff2d14737e43f9e66/temporalio-1.23.0-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf6570e0ee696f99a38d855da4441a890c7187357c16505ed458ac9ef274ed70", size = 12063601, upload-time = "2026-02-18T17:48:03.994Z" }, + { url = "https://files.pythonhosted.org/packages/ed/17/b8c6d2ec3e113c6a788322513a5ff635bdd54b3791d092ed0e273467748a/temporalio-1.23.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b82d6cca54c9f376b50e941dd10d12f7fe5b692a314fb087be72cd2898646a79", size = 12394579, upload-time = "2026-02-18T17:48:11.65Z" }, + { url = "https://files.pythonhosted.org/packages/b4/b7/f9ef7fd5ee65aef7d59ab1e95cb1b45df2fe49c17e3aa4d650ae3322f015/temporalio-1.23.0-cp310-abi3-win_amd64.whl", hash = "sha256:43c3b99a46dd329761a256f3855710c4a5b322afc879785e468bdd0b94faace6", size = 12834494, upload-time = "2026-02-18T17:48:19.071Z" }, ] [package.optional-dependencies] @@ -2700,6 +2726,7 @@ cloud-export-to-parquet = [ 
{ name = "pyarrow" }, ] dev = [ + { name = "fakeredis" }, { name = "frozenlist" }, { name = "mypy" }, { name = "poethepoet" }, @@ -2719,6 +2746,9 @@ encryption = [ { name = "aiohttp" }, { name = "cryptography" }, ] +external-storage-redis = [ + { name = "redis" }, +] gevent = [ { name = "gevent" }, ] @@ -2766,6 +2796,7 @@ cloud-export-to-parquet = [ { name = "pyarrow", specifier = ">=19.0.1" }, ] dev = [ + { name = "fakeredis", specifier = ">=2,<3" }, { name = "frozenlist", specifier = ">=1.4.0,<2" }, { name = "mypy", specifier = ">=1.4.1,<2" }, { name = "poethepoet", specifier = ">=0.36.0" }, @@ -2785,6 +2816,7 @@ encryption = [ { name = "aiohttp", specifier = ">=3.8.1,<4" }, { name = "cryptography", specifier = ">=38.0.1,<39" }, ] +external-storage-redis = [{ name = "redis", specifier = ">=5.0.0,<8" }] gevent = [{ name = "gevent", marker = "python_full_version >= '3.8'", specifier = ">=25.4.2" }] langchain = [ { name = "fastapi", specifier = ">=0.115.12" }, @@ -2815,7 +2847,7 @@ trio-async = [ [[package]] name = "tenacity" version = "8.5.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a3/4d/6a19536c50b849338fcbe9290d562b52cbdcf30d8963d3588a68a4107df1/tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78", size = 47309, upload-time = "2024-07-05T07:25:31.836Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687", size = 28165, upload-time = "2024-07-05T07:25:29.591Z" }, @@ -2824,7 +2856,7 @@ wheels = [ [[package]] name = "tiktoken" version = "0.12.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "regex" }, { name = "requests" }, @@ 
-2885,7 +2917,7 @@ wheels = [ [[package]] name = "tokenizers" version = "0.21.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, ] @@ -2910,7 +2942,7 @@ wheels = [ [[package]] name = "tomli" version = "2.2.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, @@ -2949,7 +2981,7 @@ wheels = [ [[package]] name = "tqdm" version = "4.67.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] @@ -2961,7 +2993,7 @@ wheels = [ [[package]] name = "trio" version = "0.28.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "cffi", marker = "implementation_name != 'pypy' and os_name == 'nt'" }, @@ -2979,7 +3011,7 @@ wheels = [ [[package]] name = "trio-asyncio" version = "0.15.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "greenlet" }, @@ -2995,7 +3027,7 @@ wheels = [ [[package]] name = "types-protobuf" version = "6.30.2.20250703" -source = { registry = 
"https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/dc/54/d63ce1eee8e93c4d710bbe2c663ec68e3672cf4f2fca26eecd20981c0c5d/types_protobuf-6.30.2.20250703.tar.gz", hash = "sha256:609a974754bbb71fa178fc641f51050395e8e1849f49d0420a6281ed8d1ddf46", size = 62300, upload-time = "2025-07-03T03:14:05.74Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/7e/2b/5d0377c3d6e0f49d4847ad2c40629593fee4a5c9ec56eba26a15c708fbc0/types_protobuf-6.30.2.20250703-py3-none-any.whl", hash = "sha256:fa5aff9036e9ef432d703abbdd801b436a249b6802e4df5ef74513e272434e57", size = 76489, upload-time = "2025-07-03T03:14:04.453Z" }, @@ -3004,7 +3036,7 @@ wheels = [ [[package]] name = "types-pyyaml" version = "6.0.12.20250516" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/4e/22/59e2aeb48ceeee1f7cd4537db9568df80d62bdb44a7f9e743502ea8aab9c/types_pyyaml-6.0.12.20250516.tar.gz", hash = "sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba", size = 17378, upload-time = "2025-05-16T03:08:04.897Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/99/5f/e0af6f7f6a260d9af67e1db4f54d732abad514252a7a378a6c4d17dd1036/types_pyyaml-6.0.12.20250516-py3-none-any.whl", hash = "sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530", size = 20312, upload-time = "2025-05-16T03:08:04.019Z" }, @@ -3013,7 +3045,7 @@ wheels = [ [[package]] name = "types-requests" version = "2.32.4.20250611" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] @@ -3025,7 +3057,7 @@ wheels = [ [[package]] name = "typing-extensions" version = "4.14.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, @@ -3034,7 +3066,7 @@ wheels = [ [[package]] name = "typing-inspect" version = "0.9.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions" }, { name = "typing-extensions" }, @@ -3047,7 +3079,7 @@ wheels = [ [[package]] name = "typing-inspection" version = "0.4.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] @@ -3059,7 +3091,7 @@ wheels = [ [[package]] name = "tzdata" version = "2025.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, @@ -3068,7 +3100,7 @@ wheels = [ [[package]] name = "urllib3" version = "2.5.0" -source = { registry = 
"https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, @@ -3077,7 +3109,7 @@ wheels = [ [[package]] name = "uvicorn" version = "0.24.0.post1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, @@ -3102,7 +3134,7 @@ standard = [ [[package]] name = "uvloop" version = "0.21.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/3d/76/44a55515e8c9505aa1420aebacf4dd82552e5e15691654894e90d0bd051a/uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f", size = 1442019, upload-time = "2024-10-14T23:37:20.068Z" }, @@ -3134,7 +3166,7 @@ wheels = [ [[package]] name = "watchfiles" version = "1.1.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] @@ -3234,7 +3266,7 @@ wheels = [ [[package]] name = "websockets" 
version = "15.0.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/1e/da/6462a9f510c0c49837bbc9345aca92d767a56c1fb2939e1579df1e1cdcf7/websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b", size = 175423, upload-time = "2025-03-05T20:01:35.363Z" }, @@ -3293,7 +3325,7 @@ wheels = [ [[package]] name = "yarl" version = "1.20.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "multidict" }, @@ -3392,7 +3424,7 @@ wheels = [ [[package]] name = "zipp" version = "3.23.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, @@ -3401,7 +3433,7 @@ wheels = [ [[package]] name = "zope-event" version = "5.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "setuptools" }, ] @@ -3413,7 +3445,7 @@ wheels 
= [ [[package]] name = "zope-interface" version = "7.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "setuptools" }, ] From f0b583035ba524d92c3e0758a89a57b33d774001 Mon Sep 17 00:00:00 2001 From: Edward Amsden Date: Thu, 9 Apr 2026 12:20:31 -0500 Subject: [PATCH 2/3] Fix lints --- pyproject.toml | 2 +- tests/external_storage_redis/test_redis.py | 10 ++++++--- .../test_redis_worker.py | 14 ++++-------- uv.lock | 22 +++++++++---------- 4 files changed, 23 insertions(+), 25 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 99266054..cee65cfa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ authors = [{ name = "Temporal Technologies Inc", email = "sdk@temporal.io" }] requires-python = ">=3.10" readme = "README.md" license = "MIT" -dependencies = ["temporalio>=1.23.0,<2"] +dependencies = ["temporalio>=1.25.0,<2"] [project.urls] Homepage = "https://github.com/temporalio/samples-python" diff --git a/tests/external_storage_redis/test_redis.py b/tests/external_storage_redis/test_redis.py index 30e174de..c1a7e3e3 100644 --- a/tests/external_storage_redis/test_redis.py +++ b/tests/external_storage_redis/test_redis.py @@ -12,7 +12,6 @@ import fakeredis.aioredis import pytest - from temporalio.api.common.v1 import Payload from temporalio.converter import ( JSONPlainPayloadConverter, @@ -625,7 +624,10 @@ async def test_store_client_failure_raises( expected_key = f"{KEY_PREFIX}:v0:d:sha256:{expected_hash}" with pytest.raises(RuntimeError) as exc_info: await driver.store(make_store_context(), [payload]) - assert str(exc_info.value) == f"RedisStorageDriver store failed [key={expected_key}]" + assert ( + str(exc_info.value) + == f"RedisStorageDriver store failed [key={expected_key}]" + ) assert isinstance(exc_info.value.__cause__, ConnectionError) async def test_retrieve_client_failure_raises( @@ -667,7 +669,9 @@ async def test_retrieve_payloads_concurrently( ) -> None: 
num_payloads = 5 driver = RedisStorageDriver(client=driver_client, key_prefix=KEY_PREFIX) - payloads = [make_payload(f"concurrent-retrieve-{i}") for i in range(num_payloads)] + payloads = [ + make_payload(f"concurrent-retrieve-{i}") for i in range(num_payloads) + ] claims = await driver.store(make_store_context(), payloads) barrier = _AsyncBarrier(num_payloads) diff --git a/tests/external_storage_redis/test_redis_worker.py b/tests/external_storage_redis/test_redis_worker.py index 7c8d1a28..f89d1da1 100644 --- a/tests/external_storage_redis/test_redis_worker.py +++ b/tests/external_storage_redis/test_redis_worker.py @@ -11,7 +11,6 @@ import pytest import pytest_asyncio - import temporalio.converter from temporalio.client import Client, WorkflowFailureError from temporalio.converter import ExternalStorage, JSONPlainPayloadConverter @@ -157,8 +156,7 @@ async def test_redis_driver_workflow_output_key( keys = await _list_keys(redis_asyncio_client) assert len(keys) == 1 assert ( - f"{KEY_PREFIX}:v0:ns:default:wt:LargeIOWorkflow:wi:{workflow_id}:ri:" - in keys[0] + f"{KEY_PREFIX}:v0:ns:default:wt:LargeIOWorkflow:wi:{workflow_id}:ri:" in keys[0] ) assert ":ri:null:" not in keys[0] @@ -203,8 +201,7 @@ async def test_redis_driver_workflow_activity_output_key( keys = await _list_keys(redis_asyncio_client) assert len(keys) == 1 assert ( - f"{KEY_PREFIX}:v0:ns:default:wt:LargeIOWorkflow:wi:{workflow_id}:ri:" - in keys[0] + f"{KEY_PREFIX}:v0:ns:default:wt:LargeIOWorkflow:wi:{workflow_id}:ri:" in keys[0] ) assert ":ri:null:" not in keys[0] @@ -237,8 +234,7 @@ async def test_redis_driver_standalone_activity_input_key( keys = await _list_keys(redis_asyncio_client) assert len(keys) == 2 assert all( - f"{KEY_PREFIX}:v0:ns:default:at:large_io_activity:ai:{activity_id}:ri:" - in key + f"{KEY_PREFIX}:v0:ns:default:at:large_io_activity:ai:{activity_id}:ri:" in key for key in keys ) assert all(":wt:" not in key for key in keys) @@ -345,9 +341,7 @@ async def 
test_redis_driver_update_result_key( task_queue=worker.task_queue, execution_timeout=timedelta(seconds=5), ) - result = await handle.execute_update( - SignalQueryUpdateWorkflow.do_update, LARGE - ) + result = await handle.execute_update(SignalQueryUpdateWorkflow.do_update, LARGE) assert result == LARGE await handle.signal(SignalQueryUpdateWorkflow.finish, "done") await handle.result() diff --git a/uv.lock b/uv.lock index a1331f1b..59061d21 100644 --- a/uv.lock +++ b/uv.lock @@ -1407,14 +1407,14 @@ wheels = [ [[package]] name = "nexus-rpc" -version = "1.3.0" +version = "1.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2e/f2/d54f5c03d8f4672ccc0875787a385f53dcb61f98a8ae594b5620e85b9cb3/nexus_rpc-1.3.0.tar.gz", hash = "sha256:e56d3b57b60d707ce7a72f83f23f106b86eca1043aa658e44582ab5ff30ab9ad", size = 75650, upload-time = "2025-12-08T22:59:13.002Z" } +sdist = { url = "https://files.pythonhosted.org/packages/35/d5/cd1ffb202b76ebc1b33c1332a3416e55a39929006982adc2b1eb069aaa9b/nexus_rpc-1.4.0.tar.gz", hash = "sha256:3b8b373d4865671789cc43623e3dc0bcbf192562e40e13727e17f1c149050fba", size = 82367, upload-time = "2026-02-25T22:01:34.053Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/74/0afd841de3199c148146c1d43b4bfb5605b2f1dc4c9a9087fe395091ea5a/nexus_rpc-1.3.0-py3-none-any.whl", hash = "sha256:aee0707b4861b22d8124ecb3f27d62dafbe8777dc50c66c91e49c006f971b92d", size = 28873, upload-time = "2025-12-08T22:59:12.024Z" }, + { url = "https://files.pythonhosted.org/packages/11/52/6327a5f4fda01207205038a106a99848a41c83e933cd23ea2cab3d2ebc6c/nexus_rpc-1.4.0-py3-none-any.whl", hash = "sha256:14c953d3519113f8ccec533a9efdb6b10c28afef75d11cdd6d422640c40b3a49", size = 29645, upload-time = "2026-02-25T22:01:33.122Z" }, ] [[package]] @@ -2679,7 +2679,7 @@ wheels = [ [[package]] name = "temporalio" -version = "1.23.0" +version = "1.25.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "nexus-rpc" }, @@ -2688,13 +2688,13 @@ dependencies = [ { name = "types-protobuf" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/48/ba7413e2fab8dcd277b9df00bafa572da24e9ca32de2f38d428dc3a2825c/temporalio-1.23.0.tar.gz", hash = "sha256:72750494b00eb73ded9db76195e3a9b53ff548780f73d878ec3f807ee3191410", size = 1933051, upload-time = "2026-02-18T17:48:22.353Z" } +sdist = { url = "https://files.pythonhosted.org/packages/de/9c/3782bab0bf11a40b550147c19a5d1a476c17405391751982408902d9f138/temporalio-1.25.0.tar.gz", hash = "sha256:a3bbec1dcc904f674402cfa4faae480fda490b1c53ea5440c1f1996c562016fb", size = 2152534, upload-time = "2026-04-08T18:53:55.388Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/71/26c8f21dca9092201b3b9cb7aff42460b4864b5999aa4c6a4343ac66f1fd/temporalio-1.23.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6b69ac8d75f2d90e66f4edce4316f6a33badc4a30b22efc50e9eddaa9acdc216", size = 12311037, upload-time = "2026-02-18T17:47:47.628Z" }, - { url = "https://files.pythonhosted.org/packages/ec/47/43102816139f2d346680cb7cc1e53da5f6968355ac65b4d35d4edbfca896/temporalio-1.23.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:1bbbb2f9c3cdd09451565163f6d741e51f109694c49435d475fdfa42b597219d", size = 11821906, upload-time = "2026-02-18T17:47:55.314Z" }, - { url = "https://files.pythonhosted.org/packages/00/b0/899ff28464a0e17adf17476bdfac8faf4ea41870358ff2d14737e43f9e66/temporalio-1.23.0-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf6570e0ee696f99a38d855da4441a890c7187357c16505ed458ac9ef274ed70", size = 12063601, upload-time = "2026-02-18T17:48:03.994Z" }, - { url = "https://files.pythonhosted.org/packages/ed/17/b8c6d2ec3e113c6a788322513a5ff635bdd54b3791d092ed0e273467748a/temporalio-1.23.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b82d6cca54c9f376b50e941dd10d12f7fe5b692a314fb087be72cd2898646a79", size = 12394579, upload-time = "2026-02-18T17:48:11.65Z" }, - { url = "https://files.pythonhosted.org/packages/b4/b7/f9ef7fd5ee65aef7d59ab1e95cb1b45df2fe49c17e3aa4d650ae3322f015/temporalio-1.23.0-cp310-abi3-win_amd64.whl", hash = "sha256:43c3b99a46dd329761a256f3855710c4a5b322afc879785e468bdd0b94faace6", size = 12834494, upload-time = "2026-02-18T17:48:19.071Z" }, + { url = "https://files.pythonhosted.org/packages/19/e3/5676dd10d1164b6d6ca8752314054097b89c5da931e936af402a7b15236c/temporalio-1.25.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6dc1bc8e1773b1a833d86a7ede2dd90ef4e031ced5b748b59e7f09a5bf9b327d", size = 13943906, upload-time = "2026-04-08T18:53:30.022Z" }, + { url = "https://files.pythonhosted.org/packages/89/50/7cbf7f845973be986ec165348f72f7a409750842a04d554965a39be5cb4f/temporalio-1.25.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:3c8fdcf79ea5ae8ae2cf6f48072e4a86c3e0f4778f6a8a066c6ff1d336587db4", size = 13298719, upload-time = "2026-04-08T18:53:35.95Z" }, + { url = "https://files.pythonhosted.org/packages/d2/31/d474bab8535552add6ed289911bf1ffae5d7071823ece1069842190fcaed/temporalio-1.25.0-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:141f37aaafd7d090ba5c8776e4e9bc60df1fbc64b9f50c8f00e905a436588ddc", size = 13555435, upload-time = "2026-04-08T18:53:41.36Z" }, + { url = "https://files.pythonhosted.org/packages/2a/c8/e7dc053d6107bf2a037a3c9fe7b86639a25dcb888bde0e1ca366901ee47f/temporalio-1.25.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7ca5bb80264976477d4dc7a839b3d22af8577ae92306526a061481db49bf92", size = 14052050, upload-time = "2026-04-08T18:53:46.44Z" }, + { url = "https://files.pythonhosted.org/packages/08/70/9340ed3a578321cbc153041d34834bb1ec3f1f3e3d9cded47cd1b7c3e403/temporalio-1.25.0-cp310-abi3-win_amd64.whl", hash = "sha256:9411534279a2e64847231b6059c214bff4d57cfd1532bd09f333d0b1603daa7f", size = 
14299684, upload-time = "2026-04-08T18:53:52.482Z" }, ] [package.optional-dependencies] @@ -2785,7 +2785,7 @@ trio-async = [ ] [package.metadata] -requires-dist = [{ name = "temporalio", specifier = ">=1.23.0,<2" }] +requires-dist = [{ name = "temporalio", specifier = ">=1.25.0,<2" }] [package.metadata.requires-dev] bedrock = [{ name = "boto3", specifier = ">=1.34.92,<2" }] From 999e23103da22e984ef91f79da4ba542408f7538 Mon Sep 17 00:00:00 2001 From: Edward Amsden Date: Thu, 9 Apr 2026 13:13:37 -0500 Subject: [PATCH 3/3] Fix CI tests --- external_storage_redis/redis_asyncio.py | 6 ++++-- tests/external_storage_redis/conftest.py | 5 ++++- tests/external_storage_redis/test_redis.py | 20 ++++++++++++++++++++ 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/external_storage_redis/redis_asyncio.py b/external_storage_redis/redis_asyncio.py index 6f7ca312..917b239c 100644 --- a/external_storage_redis/redis_asyncio.py +++ b/external_storage_redis/redis_asyncio.py @@ -4,11 +4,13 @@ import math from datetime import timedelta - -from redis.asyncio.client import Redis +from typing import TYPE_CHECKING from external_storage_redis._client import RedisStorageDriverClient +if TYPE_CHECKING: + from redis.asyncio.client import Redis + class _RedisAsyncioStorageDriverClient(RedisStorageDriverClient): """Adapter that wraps a ``redis.asyncio.Redis`` client. 
diff --git a/tests/external_storage_redis/conftest.py b/tests/external_storage_redis/conftest.py index 5684a7c3..b6736ab3 100644 --- a/tests/external_storage_redis/conftest.py +++ b/tests/external_storage_redis/conftest.py @@ -3,15 +3,18 @@ from __future__ import annotations from collections.abc import AsyncIterator +from typing import TYPE_CHECKING import fakeredis.aioredis import pytest import pytest_asyncio -from redis.asyncio.client import Redis from external_storage_redis import RedisStorageDriverClient from external_storage_redis.redis_asyncio import new_redis_asyncio_client +if TYPE_CHECKING: + from redis.asyncio.client import Redis + KEY_PREFIX = "test:payloads" diff --git a/tests/external_storage_redis/test_redis.py b/tests/external_storage_redis/test_redis.py index c1a7e3e3..33b871a2 100644 --- a/tests/external_storage_redis/test_redis.py +++ b/tests/external_storage_redis/test_redis.py @@ -4,6 +4,8 @@ import asyncio import hashlib +import subprocess +import sys from collections.abc import Callable, Coroutine from datetime import timedelta from functools import wraps @@ -30,6 +32,24 @@ _CONVERTER = JSONPlainPayloadConverter() +def test_redis_asyncio_adapter_import_is_workflow_safe() -> None: + completed = subprocess.run( + [ + sys.executable, + "-c", + ( + "import sys; " + "import external_storage_redis.redis_asyncio; " + "print(any(name.startswith('redis.asyncio') for name in sys.modules))" + ), + ], + check=True, + capture_output=True, + text=True, + ) + assert completed.stdout.strip() == "False" + + def make_payload(value: str = "hello") -> Payload: payload = _CONVERTER.to_payload(value) assert payload is not None