From afcf6ce9c5b88f9b60cb59ca6087250c092d714e Mon Sep 17 00:00:00 2001 From: Matthias Howell Date: Fri, 17 Apr 2026 15:41:34 -0400 Subject: [PATCH] adds Valkey Storage Implementation --- .../src/crewai_files/cache/upload_cache.py | 358 +- lib/crewai/pyproject.toml | 3 + lib/crewai/src/crewai/a2a/utils/agent_card.py | 22 +- lib/crewai/src/crewai/a2a/utils/task.py | 127 +- lib/crewai/src/crewai/memory/encoding_flow.py | 27 +- .../src/crewai/memory/storage/valkey_cache.py | 185 + .../crewai/memory/storage/valkey_storage.py | 1918 ++++++++++ lib/crewai/src/crewai/memory/types.py | 55 +- .../src/crewai/memory/unified_memory.py | 65 +- lib/crewai/src/crewai/tools/memory_tools.py | 48 +- lib/crewai/src/crewai/translations/en.json | 4 +- .../src/crewai/utilities/cache_config.py | 66 + .../tests/memory/storage/test_valkey_cache.py | 499 +++ .../memory/storage/test_valkey_storage.py | 3172 +++++++++++++++++ .../storage/test_valkey_storage_errors.py | 343 ++ .../storage/test_valkey_storage_scope.py | 1109 ++++++ .../storage/test_valkey_storage_search.py | 1175 ++++++ uv.lock | 768 ++-- 18 files changed, 9301 insertions(+), 643 deletions(-) create mode 100644 lib/crewai/src/crewai/memory/storage/valkey_cache.py create mode 100644 lib/crewai/src/crewai/memory/storage/valkey_storage.py create mode 100644 lib/crewai/src/crewai/utilities/cache_config.py create mode 100644 lib/crewai/tests/memory/storage/test_valkey_cache.py create mode 100644 lib/crewai/tests/memory/storage/test_valkey_storage.py create mode 100644 lib/crewai/tests/memory/storage/test_valkey_storage_errors.py create mode 100644 lib/crewai/tests/memory/storage/test_valkey_storage_scope.py create mode 100644 lib/crewai/tests/memory/storage/test_valkey_storage_search.py diff --git a/lib/crewai-files/src/crewai_files/cache/upload_cache.py b/lib/crewai-files/src/crewai_files/cache/upload_cache.py index 48cebdfa14..d9935a9322 100644 --- a/lib/crewai-files/src/crewai_files/cache/upload_cache.py +++ 
b/lib/crewai-files/src/crewai_files/cache/upload_cache.py @@ -1,4 +1,4 @@ -"""Cache for tracking uploaded files using aiocache.""" +"""Cache for tracking uploaded files using aiocache or ValkeyCache.""" from __future__ import annotations @@ -10,10 +10,11 @@ from datetime import datetime, timezone import hashlib import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Protocol from aiocache import Cache # type: ignore[import-untyped] from aiocache.serializers import PickleSerializer # type: ignore[import-untyped] +from crewai.utilities.cache_config import parse_cache_url from crewai_files.core.constants import DEFAULT_MAX_CACHE_ENTRIES, DEFAULT_TTL_SECONDS from crewai_files.uploaders.factory import ProviderType @@ -51,6 +52,33 @@ def is_expired(self) -> bool: return False return datetime.now(timezone.utc) >= self.expires_at + def to_dict(self) -> dict[str, Any]: + """Serialize to a JSON-compatible dict.""" + return { + "file_id": self.file_id, + "provider": self.provider, + "file_uri": self.file_uri, + "content_type": self.content_type, + "uploaded_at": self.uploaded_at.isoformat(), + "expires_at": self.expires_at.isoformat() if self.expires_at else None, + } + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> CachedUpload: + """Deserialize from a dict.""" + return cls( + file_id=data["file_id"], + provider=data["provider"], + file_uri=data.get("file_uri"), + content_type=data["content_type"], + uploaded_at=datetime.fromisoformat(data["uploaded_at"]), + expires_at=( + datetime.fromisoformat(data["expires_at"]) + if data.get("expires_at") + else None + ), + ) + def _make_key(file_hash: str, provider: str) -> str: """Create a cache key from file hash and provider.""" @@ -58,14 +86,7 @@ def _make_key(file_hash: str, provider: str) -> str: def _compute_file_hash_streaming(chunks: Iterator[bytes]) -> str: - """Compute SHA-256 hash from streaming chunks. - - Args: - chunks: Iterator of byte chunks. 
- - Returns: - Hexadecimal hash string. - """ + """Compute SHA-256 hash from streaming chunks.""" hasher = hashlib.sha256() for chunk in chunks: hasher.update(chunk) @@ -73,10 +94,7 @@ def _compute_file_hash_streaming(chunks: Iterator[bytes]) -> str: def _compute_file_hash(file: FileInput) -> str: - """Compute SHA-256 hash of file content. - - Uses streaming for FilePath sources to avoid loading large files into memory. - """ + """Compute SHA-256 hash of file content.""" from crewai_files.core.sources import FilePath source = file._file_source @@ -86,10 +104,88 @@ def _compute_file_hash(file: FileInput) -> str: return hashlib.sha256(content).hexdigest() +# --------------------------------------------------------------------------- +# Cache backend protocol + implementations +# --------------------------------------------------------------------------- + + +class CacheBackend(Protocol): + """Protocol for cache backends used by UploadCache.""" + + async def get(self, key: str) -> CachedUpload | None: ... + async def set(self, key: str, value: CachedUpload, ttl: int) -> None: ... + async def delete(self, key: str) -> bool: ... 
+ + +class AiocacheBackend: + """Cache backend backed by aiocache (memory or Redis).""" + + def __init__(self, cache: Cache) -> None: # type: ignore[no-any-unimported] + self._cache = cache + + async def get(self, key: str) -> CachedUpload | None: + result = await self._cache.get(key) + if isinstance(result, CachedUpload): + return result + return None + + async def set(self, key: str, value: CachedUpload, ttl: int) -> None: + await self._cache.set(key, value, ttl=ttl) + + async def delete(self, key: str) -> bool: + result = await self._cache.delete(key) + return bool(result > 0 if isinstance(result, int) else result) + + +class ValkeyCacheBackend: + """Cache backend backed by ValkeyCache (JSON serialization).""" + + def __init__( + self, + host: str = "localhost", + port: int = 6379, + db: int = 0, + password: str | None = None, + default_ttl: int | None = None, + ) -> None: + from crewai.memory.storage.valkey_cache import ValkeyCache + + self._cache = ValkeyCache( + host=host, port=port, db=db, password=password, default_ttl=default_ttl + ) + + async def get(self, key: str) -> CachedUpload | None: + data = await self._cache.get(key) + if data is None: + return None + try: + return CachedUpload.from_dict(data) + except (KeyError, ValueError) as e: + logger.warning(f"Failed to deserialize cached upload: {e}") + return None + + async def set(self, key: str, value: CachedUpload, ttl: int) -> None: + await self._cache.set(key, value.to_dict(), ttl=ttl) + + async def delete(self, key: str) -> bool: + await self._cache.delete(key) + return True # ValkeyCache.delete is void + + +# --------------------------------------------------------------------------- +# Helper: parse Valkey/Redis URL from environment +# --------------------------------------------------------------------------- + + +# --------------------------------------------------------------------------- +# UploadCache +# --------------------------------------------------------------------------- + + class 
UploadCache: - """Async cache for tracking uploaded files using aiocache. + """Async cache for tracking uploaded files. - Supports in-memory caching by default, with optional Redis backend + Supports in-memory caching by default, with optional Redis or Valkey backend for distributed setups. Attributes: @@ -110,7 +206,7 @@ def __init__( Args: ttl: Default TTL in seconds. namespace: Cache namespace. - cache_type: Backend type ("memory" or "redis"). + cache_type: Backend type ("memory", "redis", or "valkey"). max_entries: Maximum cache entries (None for unlimited). **cache_kwargs: Additional args for cache backend. """ @@ -120,18 +216,39 @@ def __init__( self._provider_keys: dict[ProviderType, set[str]] = {} self._key_access_order: list[str] = [] - if cache_type == "redis": - self._cache = Cache( - Cache.REDIS, - serializer=PickleSerializer(), - namespace=namespace, - **cache_kwargs, + self._backend: CacheBackend = self._create_backend( + cache_type, namespace, ttl, **cache_kwargs + ) + + @staticmethod + def _create_backend( + cache_type: str, + namespace: str, + ttl: int, + **cache_kwargs: Any, + ) -> CacheBackend: + """Create the appropriate cache backend.""" + if cache_type == "valkey": + conn = parse_cache_url() or {} + return ValkeyCacheBackend( + host=cache_kwargs.get("host", conn.get("host", "localhost")), + port=cache_kwargs.get("port", conn.get("port", 6379)), + db=cache_kwargs.get("db", conn.get("db", 0)), + password=cache_kwargs.get("password", conn.get("password")), + default_ttl=ttl, ) - else: - self._cache = Cache( - serializer=PickleSerializer(), - namespace=namespace, + if cache_type == "redis": + return AiocacheBackend( + Cache( + Cache.REDIS, + serializer=PickleSerializer(), + namespace=namespace, + **cache_kwargs, + ) ) + return AiocacheBackend( + Cache(serializer=PickleSerializer(), namespace=namespace) + ) def _track_key(self, provider: ProviderType, key: str) -> None: """Track a key for a provider (for cleanup) and access order.""" @@ -150,84 
+267,53 @@ def _untrack_key(self, provider: ProviderType, key: str) -> None: self._key_access_order.remove(key) async def _evict_if_needed(self) -> int: - """Evict oldest entries if limit exceeded. - - Returns: - Number of entries evicted. - """ + """Evict oldest entries if limit exceeded.""" if self.max_entries is None: return 0 - current_count = len(self) if current_count < self.max_entries: return 0 - to_evict = max(1, self.max_entries // 10) return await self._evict_oldest(to_evict) async def _evict_oldest(self, count: int) -> int: - """Evict the oldest entries from the cache. - - Args: - count: Number of entries to evict. - - Returns: - Number of entries actually evicted. - """ + """Evict the oldest entries from the cache.""" evicted = 0 keys_to_evict = self._key_access_order[:count] - for key in keys_to_evict: - await self._cache.delete(key) + await self._backend.delete(key) self._key_access_order.remove(key) for provider_keys in self._provider_keys.values(): provider_keys.discard(key) evicted += 1 - if evicted > 0: logger.debug(f"Evicted {evicted} oldest cache entries") - return evicted + # ------------------------------------------------------------------ + # Async public API + # ------------------------------------------------------------------ + async def aget( self, file: FileInput, provider: ProviderType ) -> CachedUpload | None: - """Get a cached upload for a file. - - Args: - file: The file to look up. - provider: The provider name. - - Returns: - Cached upload if found and not expired, None otherwise. - """ + """Get a cached upload for a file.""" file_hash = _compute_file_hash(file) return await self.aget_by_hash(file_hash, provider) async def aget_by_hash( self, file_hash: str, provider: ProviderType ) -> CachedUpload | None: - """Get a cached upload by file hash. - - Args: - file_hash: Hash of the file content. - provider: The provider name. - - Returns: - Cached upload if found and not expired, None otherwise. 
- """ + """Get a cached upload by file hash.""" key = _make_key(file_hash, provider) - result = await self._cache.get(key) - + result = await self._backend.get(key) if result is None: return None - if isinstance(result, CachedUpload): - if result.is_expired(): - await self._cache.delete(key) - self._untrack_key(provider, key) - return None - return result - return None + if result.is_expired(): + await self._backend.delete(key) + self._untrack_key(provider, key) + return None + return result async def aset( self, @@ -237,18 +323,7 @@ async def aset( file_uri: str | None = None, expires_at: datetime | None = None, ) -> CachedUpload: - """Cache an uploaded file. - - Args: - file: The file that was uploaded. - provider: The provider name. - file_id: Provider-specific file identifier. - file_uri: Optional URI for accessing the file. - expires_at: When the upload expires. - - Returns: - The created cache entry. - """ + """Cache an uploaded file.""" file_hash = _compute_file_hash(file) return await self.aset_by_hash( file_hash=file_hash, @@ -268,21 +343,8 @@ async def aset_by_hash( file_uri: str | None = None, expires_at: datetime | None = None, ) -> CachedUpload: - """Cache an uploaded file by hash. - - Args: - file_hash: Hash of the file content. - content_type: MIME type of the file. - provider: The provider name. - file_id: Provider-specific file identifier. - file_uri: Optional URI for accessing the file. - expires_at: When the upload expires. - - Returns: - The created cache entry. 
- """ + """Cache an uploaded file by hash.""" await self._evict_if_needed() - key = _make_key(file_hash, provider) now = datetime.now(timezone.utc) @@ -299,106 +361,74 @@ async def aset_by_hash( if expires_at is not None: ttl = max(0, int((expires_at - now).total_seconds())) - await self._cache.set(key, cached, ttl=ttl) + await self._backend.set(key, cached, ttl=ttl) self._track_key(provider, key) logger.debug(f"Cached upload: {file_id} for provider {provider}") return cached async def aremove(self, file: FileInput, provider: ProviderType) -> bool: - """Remove a cached upload. - - Args: - file: The file to remove. - provider: The provider name. - - Returns: - True if entry was removed, False if not found. - """ + """Remove a cached upload.""" file_hash = _compute_file_hash(file) key = _make_key(file_hash, provider) - - result = await self._cache.delete(key) - removed = bool(result > 0 if isinstance(result, int) else result) + removed = await self._backend.delete(key) if removed: self._untrack_key(provider, key) return removed async def aremove_by_file_id(self, file_id: str, provider: ProviderType) -> bool: - """Remove a cached upload by file ID. - - Args: - file_id: The file ID to remove. - provider: The provider name. - - Returns: - True if entry was removed, False if not found. - """ + """Remove a cached upload by file ID.""" if provider not in self._provider_keys: return False - for key in list(self._provider_keys[provider]): - cached = await self._cache.get(key) - if isinstance(cached, CachedUpload) and cached.file_id == file_id: - await self._cache.delete(key) + cached = await self._backend.get(key) + if cached is not None and cached.file_id == file_id: + await self._backend.delete(key) self._untrack_key(provider, key) return True return False async def aclear_expired(self) -> int: - """Remove all expired entries from the cache. - - Returns: - Number of entries removed. 
- """ + """Remove all expired entries from the cache.""" removed = 0 - for provider, keys in list(self._provider_keys.items()): for key in list(keys): - cached = await self._cache.get(key) - if cached is None or ( - isinstance(cached, CachedUpload) and cached.is_expired() - ): - await self._cache.delete(key) + cached = await self._backend.get(key) + if cached is None or cached.is_expired(): + await self._backend.delete(key) self._untrack_key(provider, key) removed += 1 - if removed > 0: logger.debug(f"Cleared {removed} expired cache entries") return removed async def aclear(self) -> int: - """Clear all entries from the cache. - - Returns: - Number of entries cleared. - """ + """Clear all entries from the cache.""" count = sum(len(keys) for keys in self._provider_keys.values()) - await self._cache.clear(namespace=self.namespace) + # Delete all tracked keys individually (works for all backends) + for keys in self._provider_keys.values(): + for key in keys: + await self._backend.delete(key) self._provider_keys.clear() - + self._key_access_order.clear() if count > 0: logger.debug(f"Cleared {count} cache entries") return count async def aget_all_for_provider(self, provider: ProviderType) -> list[CachedUpload]: - """Get all cached uploads for a provider. - - Args: - provider: The provider name. - - Returns: - List of cached uploads for the provider. 
- """ + """Get all cached uploads for a provider.""" if provider not in self._provider_keys: return [] - results: list[CachedUpload] = [] for key in list(self._provider_keys[provider]): - cached = await self._cache.get(key) - if isinstance(cached, CachedUpload) and not cached.is_expired(): + cached = await self._backend.get(key) + if cached is not None and not cached.is_expired(): results.append(cached) return results + # ------------------------------------------------------------------ + # Sync wrappers + # ------------------------------------------------------------------ + @staticmethod def _run_sync(coro: Any) -> Any: """Run an async coroutine from sync context without blocking event loop.""" @@ -489,11 +519,7 @@ def __len__(self) -> int: return sum(len(keys) for keys in self._provider_keys.values()) def get_providers(self) -> builtins.set[ProviderType]: - """Get all provider names that have cached entries. - - Returns: - Set of provider names. - """ + """Get all provider names that have cached entries.""" return builtins.set(self._provider_keys.keys()) @@ -506,17 +532,7 @@ def get_upload_cache( cache_type: str = "memory", **cache_kwargs: Any, ) -> UploadCache: - """Get or create the default upload cache. - - Args: - ttl: Default TTL in seconds. - namespace: Cache namespace. - cache_type: Backend type ("memory" or "redis"). - **cache_kwargs: Additional args for cache backend. - - Returns: - The upload cache instance. 
- """ + """Get or create the default upload cache.""" global _default_cache if _default_cache is None: _default_cache = UploadCache( diff --git a/lib/crewai/pyproject.toml b/lib/crewai/pyproject.toml index 76000baad3..928d830c8c 100644 --- a/lib/crewai/pyproject.toml +++ b/lib/crewai/pyproject.toml @@ -111,6 +111,9 @@ file-processing = [ qdrant-edge = [ "qdrant-edge-py>=0.6.0", ] +valkey = [ + "valkey-glide>=1.3.0", +] [project.scripts] diff --git a/lib/crewai/src/crewai/a2a/utils/agent_card.py b/lib/crewai/src/crewai/a2a/utils/agent_card.py index df5886988e..d3a47e2fef 100644 --- a/lib/crewai/src/crewai/a2a/utils/agent_card.py +++ b/lib/crewai/src/crewai/a2a/utils/agent_card.py @@ -13,8 +13,12 @@ from typing import TYPE_CHECKING from a2a.client.errors import A2AClientHTTPError -from a2a.types import AgentCapabilities, AgentCard, AgentSkill -from aiocache import cached # type: ignore[import-untyped] +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentSkill, +) +from aiocache import cached, caches # type: ignore[import-untyped] from aiocache.serializers import PickleSerializer # type: ignore[import-untyped] import httpx @@ -32,6 +36,7 @@ A2AAuthenticationFailedEvent, A2AConnectionErrorEvent, ) +from crewai.utilities.cache_config import get_aiocache_config if TYPE_CHECKING: @@ -40,6 +45,18 @@ from crewai.task import Task +_cache_configured = False + + +def _ensure_cache_configured() -> None: + """Configure aiocache on first use (lazy initialization).""" + global _cache_configured + if _cache_configured: + return + caches.set_config(get_aiocache_config()) + _cache_configured = True + + def _get_tls_verify(auth: ClientAuthScheme | None) -> ssl.SSLContext | bool | str: """Get TLS verify parameter from auth scheme. 
@@ -191,6 +208,7 @@ async def afetch_agent_card( else: auth_hash = _auth_store.compute_key("none", "") _auth_store.set(auth_hash, auth) + _ensure_cache_configured() agent_card: AgentCard = await _afetch_agent_card_cached( endpoint, auth_hash, timeout ) diff --git a/lib/crewai/src/crewai/a2a/utils/task.py b/lib/crewai/src/crewai/a2a/utils/task.py index 6af935bb35..b1d1f2a142 100644 --- a/lib/crewai/src/crewai/a2a/utils/task.py +++ b/lib/crewai/src/crewai/a2a/utils/task.py @@ -9,9 +9,8 @@ from functools import wraps import json import logging -import os +import threading from typing import TYPE_CHECKING, Any, ParamSpec, TypeVar, cast -from urllib.parse import urlparse from a2a.server.agent_execution import RequestContext from a2a.server.events import EventQueue @@ -38,7 +37,6 @@ from a2a.utils.errors import ServerError from aiocache import SimpleMemoryCache, caches # type: ignore[import-untyped] from pydantic import BaseModel -from typing_extensions import TypedDict from crewai.a2a.utils.agent_card import _get_server_config from crewai.a2a.utils.content_type import validate_message_parts @@ -50,12 +48,18 @@ A2AServerTaskStartedEvent, ) from crewai.task import Task +from crewai.utilities.cache_config import ( + get_aiocache_config, + parse_cache_url, + use_valkey_cache, +) from crewai.utilities.pydantic_schema_utils import create_model_from_schema if TYPE_CHECKING: from crewai.a2a.extensions.server import ExtensionContext, ServerExtensionRegistry from crewai.agent import Agent + from crewai.memory.storage.valkey_cache import ValkeyCache logger = logging.getLogger(__name__) @@ -64,52 +68,40 @@ T = TypeVar("T") -class RedisCacheConfig(TypedDict, total=False): - """Configuration for aiocache Redis backend.""" - - cache: str - endpoint: str - port: int - db: int - password: str +# --------------------------------------------------------------------------- +# Lazy cache initialisation +# --------------------------------------------------------------------------- 
+_task_cache: ValkeyCache | None = None +_cache_initialized = False +_cache_init_lock = threading.Lock() -def _parse_redis_url(url: str) -> RedisCacheConfig: - """Parse a Redis URL into aiocache configuration. - Args: - url: Redis connection URL (e.g., redis://localhost:6379/0). +def _ensure_task_cache() -> None: + """Initialise the task cache on first use (thread-safe).""" + global _task_cache, _cache_initialized + if _cache_initialized: + return - Returns: - Configuration dict for aiocache.RedisCache. - """ - parsed = urlparse(url) - config: RedisCacheConfig = { - "cache": "aiocache.RedisCache", - "endpoint": parsed.hostname or "localhost", - "port": parsed.port or 6379, - } - if parsed.path and parsed.path != "/": - try: - config["db"] = int(parsed.path.lstrip("/")) - except ValueError: - pass - if parsed.password: - config["password"] = parsed.password - return config + with _cache_init_lock: + if _cache_initialized: + return + if use_valkey_cache(): + from crewai.memory.storage.valkey_cache import ValkeyCache -_redis_url = os.environ.get("REDIS_URL") + conn = parse_cache_url() or {} + _task_cache = ValkeyCache( + host=conn.get("host", "localhost"), + port=conn.get("port", 6379), + db=conn.get("db", 0), + password=conn.get("password"), + default_ttl=3600, + ) + else: + caches.set_config(get_aiocache_config()) -caches.set_config( - { - "default": _parse_redis_url(_redis_url) - if _redis_url - else { - "cache": "aiocache.SimpleMemoryCache", - } - } -) + _cache_initialized = True def cancellable( @@ -130,6 +122,8 @@ def cancellable( @wraps(fn) async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: """Wrap function with cancellation monitoring.""" + _ensure_task_cache() + context: RequestContext | None = None for arg in args: if isinstance(arg, RequestContext): @@ -142,10 +136,19 @@ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: return await fn(*args, **kwargs) task_id = context.task_id - cache = caches.get("default") - async def poll_for_cancel() 
-> bool: - """Poll cache for cancellation flag.""" + async def poll_for_cancel_valkey() -> bool: + """Poll ValkeyCache for cancellation flag.""" + while True: + if _task_cache is not None and await _task_cache.get( + f"cancel:{task_id}" + ): + return True + await asyncio.sleep(0.1) + + async def poll_for_cancel_aiocache() -> bool: + """Poll aiocache for cancellation flag.""" + cache = caches.get("default") while True: if await cache.get(f"cancel:{task_id}"): return True @@ -153,8 +156,14 @@ async def poll_for_cancel() -> bool: async def watch_for_cancel() -> bool: """Watch for cancellation events via pub/sub or polling.""" + if _task_cache is not None: + # ValkeyCache: use polling (pub/sub not implemented yet) + return await poll_for_cancel_valkey() + + # aiocache: use pub/sub if Redis, otherwise poll + cache = caches.get("default") if isinstance(cache, SimpleMemoryCache): - return await poll_for_cancel() + return await poll_for_cancel_aiocache() try: client = cache.client @@ -168,7 +177,7 @@ async def watch_for_cancel() -> bool: "Cancel watcher Redis error, falling back to polling", extra={"task_id": task_id, "error": str(e)}, ) - return await poll_for_cancel() + return await poll_for_cancel_aiocache() return False execute_task = asyncio.create_task(fn(*args, **kwargs)) @@ -190,7 +199,12 @@ async def watch_for_cancel() -> bool: cancel_watch.cancel() return execute_task.result() finally: - await cache.delete(f"cancel:{task_id}") + # Clean up cancellation flag + if _task_cache is not None: + await _task_cache.delete(f"cancel:{task_id}") + else: + cache = caches.get("default") + await cache.delete(f"cancel:{task_id}") return wrapper @@ -475,6 +489,8 @@ async def cancel( if task_id is None or context_id is None: raise ServerError(InvalidParamsError(message="task_id and context_id required")) + _ensure_task_cache() + if context.current_task and context.current_task.status.state in ( TaskState.completed, TaskState.failed, @@ -482,11 +498,16 @@ async def cancel( ): 
return context.current_task - cache = caches.get("default") - - await cache.set(f"cancel:{task_id}", True, ttl=3600) - if not isinstance(cache, SimpleMemoryCache): - await cache.client.publish(f"cancel:{task_id}", "cancel") + if _task_cache is not None: + # Use ValkeyCache + await _task_cache.set(f"cancel:{task_id}", True, ttl=3600) + # Note: pub/sub not implemented for ValkeyCache yet, relies on polling + else: + # Use aiocache + cache = caches.get("default") + await cache.set(f"cancel:{task_id}", True, ttl=3600) + if not isinstance(cache, SimpleMemoryCache): + await cache.client.publish(f"cancel:{task_id}", "cancel") await event_queue.enqueue_event( TaskStatusUpdateEvent( diff --git a/lib/crewai/src/crewai/memory/encoding_flow.py b/lib/crewai/src/crewai/memory/encoding_flow.py index acd025d553..ac753d26d0 100644 --- a/lib/crewai/src/crewai/memory/encoding_flow.py +++ b/lib/crewai/src/crewai/memory/encoding_flow.py @@ -18,7 +18,7 @@ from typing import Any from uuid import uuid4 -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, field_validator from crewai.flow.flow import Flow, listen, start from crewai.memory.analyze import ( @@ -68,6 +68,31 @@ class ItemState(BaseModel): plan: ConsolidationPlan | None = None result_record: MemoryRecord | None = None + @field_validator("similar_records", "result_record", mode="before") + @classmethod + def ensure_embedding_is_list(cls, v: Any) -> Any: + """Ensure MemoryRecord embeddings are list[float], not bytes.""" + if v is None: + return None + if isinstance(v, list): + # Process list of MemoryRecords + for record in v: + if isinstance(record, MemoryRecord) and isinstance( + record.embedding, bytes + ): + import numpy as np + + arr = np.frombuffer(record.embedding, dtype=np.float32) + record.embedding = [float(x) for x in arr] + return v + if isinstance(v, MemoryRecord) and isinstance(v.embedding, bytes): + # Process single MemoryRecord + import numpy as np + + arr = np.frombuffer(v.embedding, 
dtype=np.float32) + v.embedding = [float(x) for x in arr] + return v + class EncodingState(BaseModel): """Batch-level state for the encoding flow.""" diff --git a/lib/crewai/src/crewai/memory/storage/valkey_cache.py b/lib/crewai/src/crewai/memory/storage/valkey_cache.py new file mode 100644 index 0000000000..722706feae --- /dev/null +++ b/lib/crewai/src/crewai/memory/storage/valkey_cache.py @@ -0,0 +1,185 @@ +"""Valkey-based cache implementation for CrewAI. + +This module provides a simple cache interface using Valkey-GLIDE client +for caching operations with optional TTL support. It replaces Redis usage +in A2A communication, file uploads, and agent card caching. +""" + +from __future__ import annotations + +import json +import logging +from typing import Any + +from glide import GlideClient, GlideClientConfiguration, NodeAddress + + +_logger = logging.getLogger(__name__) + + +class ValkeyCache: + """Simple cache interface using Valkey-GLIDE client. + + Provides get/set/delete/exists operations for caching with optional TTL. + Uses JSON serialization for complex values and lazy client initialization. + + Example: + >>> cache = ValkeyCache(host="localhost", port=6379) + >>> await cache.set("key", {"data": "value"}, ttl=3600) + >>> value = await cache.get("key") + >>> await cache.delete("key") + """ + + def __init__( + self, + host: str = "localhost", + port: int = 6379, + db: int = 0, + password: str | None = None, + default_ttl: int | None = None, + ) -> None: + """Initialize Valkey cache. + + Args: + host: Valkey server hostname. + port: Valkey server port. + db: Database number to use. + password: Optional password for authentication. + default_ttl: Default TTL in seconds (None = no expiration). + """ + self._host = host + self._port = port + self._db = db + self._password = password + self._default_ttl = default_ttl + self._client: GlideClient | None = None + + async def _get_client(self) -> GlideClient: + """Get or create Valkey client (lazy initialization). 
+ + Returns: + Initialized GlideClient instance. + + Raises: + RuntimeError: If connection to Valkey fails. + TimeoutError: If connection attempt times out (10 seconds). + """ + import asyncio + + if self._client is None: + try: + config = GlideClientConfiguration( + addresses=[NodeAddress(self._host, self._port)], + database_id=self._db, + ) + if self._password: + from glide import ServerCredentials + + config.credentials = ServerCredentials(password=self._password) + + # Add connection timeout (10 seconds) + try: + self._client = await asyncio.wait_for( + GlideClient.create(config), timeout=10.0 + ) + except asyncio.TimeoutError as e: + _logger.error( + f"Connection timeout after 10 seconds to Valkey at {self._host}:{self._port}" + ) + raise TimeoutError( + f"Connection timeout to Valkey at {self._host}:{self._port}. " + "Ensure Valkey is running and accessible." + ) from e + + _logger.info( + f"Valkey cache client initialized: {self._host}:{self._port}/{self._db}" + ) + except (TimeoutError, RuntimeError): + raise + except Exception as e: + _logger.error(f"Failed to create Valkey cache client: {e}") + raise RuntimeError( + f"Cannot connect to Valkey at {self._host}:{self._port}" + ) from e + + return self._client + + async def get(self, key: str) -> Any | None: + """Get value from cache. + + Args: + key: Cache key. + + Returns: + Cached value (deserialized from JSON) or None if not found. + """ + client = await self._get_client() + value = await client.get(key) + + if value is None: + return None + + try: + return json.loads(value) + except json.JSONDecodeError: + _logger.warning(f"Failed to deserialize cached value for key: {key}") + return None + + async def set( + self, + key: str, + value: Any, + ttl: int | None = None, + ) -> None: + """Set value in cache. + + Args: + key: Cache key. + value: Value to cache (will be serialized to JSON). + ttl: TTL in seconds (None uses default_ttl, 0 = no expiration). 
+ """ + from glide import ExpirySet, ExpiryType + + client = await self._get_client() + serialized = json.dumps(value) + + ttl_to_use = ttl if ttl is not None else self._default_ttl + + if ttl_to_use and ttl_to_use > 0: + # Set with expiration using SET command with EX option + await client.set( + key, + serialized, + expiry=ExpirySet(ExpiryType.SEC, ttl_to_use), + ) + else: + await client.set(key, serialized) + + async def delete(self, key: str) -> None: + """Delete value from cache. + + Args: + key: Cache key to delete. + """ + client = await self._get_client() + await client.delete([key]) + + async def exists(self, key: str) -> bool: + """Check if key exists in cache. + + Args: + key: Cache key to check. + + Returns: + True if key exists, False otherwise. + """ + client = await self._get_client() + result = await client.exists([key]) + return result > 0 + + async def close(self) -> None: + """Close Valkey client connection.""" + if self._client: + await self._client.close() + self._client = None + _logger.debug("Valkey cache client closed") diff --git a/lib/crewai/src/crewai/memory/storage/valkey_storage.py b/lib/crewai/src/crewai/memory/storage/valkey_storage.py new file mode 100644 index 0000000000..662a71f2e2 --- /dev/null +++ b/lib/crewai/src/crewai/memory/storage/valkey_storage.py @@ -0,0 +1,1918 @@ +"""Valkey-backed storage for the unified memory system. + +This module provides ValkeyStorage, a distributed storage backend that implements +the StorageBackend protocol using Valkey-GLIDE as the underlying data store. +It supports vector similarity search via Valkey Search module and provides +efficient indexing for scope, category, and metadata filtering. 
+""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable, Coroutine +from datetime import datetime +import json +import logging +import threading +from typing import Any + +from glide import ( + BackoffStrategy, + ConfigurationError, + ConnectionError, + GlideClient, + GlideClientConfiguration, + NodeAddress, + ServerCredentials, +) +import numpy as np + +from crewai.memory.types import MemoryRecord, ScopeInfo + + +_logger = logging.getLogger(__name__) + + +class ValkeyStorage: + """Valkey-backed storage for the unified memory system. + + Provides distributed, high-performance storage using Valkey-GLIDE client. + Implements the StorageBackend protocol with both sync and async methods. + + This implementation supports standalone Valkey mode only. Cluster mode is + not supported in this version. + + Example: + >>> storage = ValkeyStorage(host="localhost", port=6379) + >>> record = MemoryRecord(content="test", embedding=[0.1, 0.2]) + >>> storage.save([record]) + >>> retrieved = storage.get_record(record.id) + """ + + def __init__( + self, + host: str = "localhost", + port: int = 6379, + db: int = 0, + password: str | None = None, + use_tls: bool = False, + tls_ca_cert_path: str | None = None, + tls_client_cert_path: str | None = None, + tls_client_key_path: str | None = None, + vector_dim: int = 1536, + index_algorithm: str = "HNSW", + ) -> None: + """Initialize Valkey storage with connection parameters and vector index config. + + Note: This implementation supports standalone Valkey mode only. + Cluster mode is not supported in this version. + + TLS Support: The current version of valkey-glide Python client has limited + TLS configuration options. Custom CA certificates and client certificates + are not yet supported in the Python binding. Use use_tls=True for basic + TLS encryption with system CA certificates. + + Args: + host: Valkey server hostname. + port: Valkey server port. 
+ db: Database number to use (standalone mode only). + password: Optional password for authentication. + use_tls: Enable TLS/SSL encryption for connections. + tls_ca_cert_path: Reserved for future use (not yet supported by GLIDE). + tls_client_cert_path: Reserved for future use (not yet supported by GLIDE). + tls_client_key_path: Reserved for future use (not yet supported by GLIDE). + vector_dim: Dimension of embedding vectors (default 1536 for OpenAI). + index_algorithm: Vector index algorithm ("HNSW" or "FLAT"). + """ + self._host = host + self._port = port + self._db = db + self._password = password + self._use_tls = use_tls + # Store for future use when GLIDE adds support + self._tls_ca_cert_path = tls_ca_cert_path + self._tls_client_cert_path = tls_client_cert_path + self._tls_client_key_path = tls_client_key_path + self._vector_dim = vector_dim + self._index_algorithm = index_algorithm + self._client: GlideClient | None = None + self._index_created = False + self._sync_lock = threading.Lock() + + # Write lock for compatibility with memory system + # Note: Valkey handles concurrency at the server level, so this is a no-op lock + self._write_lock = threading.RLock() + + # Warn if TLS cert paths are provided but not supported + if use_tls and ( + tls_ca_cert_path or tls_client_cert_path or tls_client_key_path + ): + _logger.warning( + "Custom TLS certificates are not yet supported by valkey-glide Python client. " + "Using system CA certificates for TLS verification." + ) + + async def _get_client(self) -> GlideClient: + """Get or create Valkey client with lazy initialization. + + Returns: + Initialized GlideClient instance. + + Raises: + RuntimeError: If connection to Valkey fails. + TimeoutError: If connection attempt times out (10 seconds). 
+ """ + if self._client is None: + try: + # Build node address with explicit host and port + node = NodeAddress(host=self._host, port=self._port) + + # Build configuration + config = GlideClientConfiguration( + addresses=[node], + database_id=self._db, + use_tls=self._use_tls, + request_timeout=2000, # 2 seconds for FT.SEARCH and other commands + reconnect_strategy=BackoffStrategy( + num_of_retries=5, + factor=200, # milliseconds + exponent_base=2, + ), + ) + + # Add authentication if provided + if self._password: + config.credentials = ServerCredentials(password=self._password) + + # Add connection timeout (10 seconds) + try: + self._client = await asyncio.wait_for( + GlideClient.create(config), timeout=10.0 + ) + except asyncio.TimeoutError as e: + _logger.error( + f"Connection timeout after 10 seconds to Valkey at {self._host}:{self._port}" + ) + raise TimeoutError( + f"Connection timeout to Valkey at {self._host}:{self._port}. " + "Ensure Valkey is running and accessible." + ) from e + + _logger.info( + f"Connected to Valkey at {self._host}:{self._port} (db={self._db}, tls={self._use_tls})" + ) + + except (ConfigurationError, ConnectionError) as e: + _logger.error(f"Failed to create Valkey client: {e}") + raise RuntimeError( + f"Cannot connect to Valkey at {self._host}:{self._port}" + ) from e + + return self._client + + @property + def write_lock(self) -> threading.RLock: + """Write lock for compatibility with memory system. + + Note: Valkey handles concurrency at the server level with atomic operations, + so this lock is primarily for API compatibility with other storage backends. + """ + return self._write_lock + + def _run_async(self, coro: Coroutine[Any, Any, Any]) -> Any: + """Bridge async operations to sync context. + + Uses a dedicated background thread with a persistent event loop so the + Valkey client (and its TCP connection) can be reused across calls. 
+ + Concurrent sync callers are serialized via a lock to avoid overloading + the single-threaded background event loop (e.g. when the encoding flow + dispatches parallel searches from a ThreadPoolExecutor). + + Args: + coro: Coroutine to execute. + + Returns: + Result of the coroutine execution. + """ + with self._sync_lock: + bg_loop = self._get_or_create_loop() + future = asyncio.run_coroutine_threadsafe(coro, bg_loop) + return future.result() + + # ------------------------------------------------------------------ + # Persistent event-loop helpers + # ------------------------------------------------------------------ + # Class-level: a single background event loop shared by ALL ValkeyStorage + # instances. This is intentional — the loop is just an I/O scheduler and + # the glide client handles per-connection state internally. + # _bg_lock guards loop creation; _sync_lock (instance-level, set in + # __init__) serialises sync callers so they don't flood the loop. + # ------------------------------------------------------------------ + _bg_loop: asyncio.AbstractEventLoop | None = None + _bg_thread: threading.Thread | None = None + _bg_lock: threading.Lock = threading.Lock() + + @classmethod + def _get_or_create_loop(cls) -> asyncio.AbstractEventLoop: + """Return a long-lived event loop running on a background daemon thread.""" + if cls._bg_loop is not None and cls._bg_loop.is_running(): + return cls._bg_loop + + with cls._bg_lock: + # Double-check after acquiring lock + if cls._bg_loop is not None and cls._bg_loop.is_running(): + return cls._bg_loop + + loop = asyncio.new_event_loop() + thread = threading.Thread( + target=loop.run_forever, daemon=True, name="valkey-io" + ) + thread.start() + cls._bg_loop = loop + cls._bg_thread = thread + return loop + + async def _retry_operation( + self, + operation: Callable[[], Coroutine[Any, Any, Any]], + max_retries: int = 5, + ) -> Any: + """Retry operation with exponential backoff on connection errors. 
+ + Retries operations that fail due to connection errors using exponential + backoff starting at 0.2 seconds. Logs connection errors at debug level. + + Args: + operation: Zero-argument callable that returns a fresh coroutine on + each invocation, allowing safe retries. + max_retries: Maximum number of retry attempts (default 5). + + Returns: + Result of the operation execution. + + Raises: + ConnectionError: If operation fails after max_retries attempts. + """ + from glide import ClosingError + + delay = 0.2 # Start with 200ms + last_error: Exception | None = None + + for attempt in range(max_retries + 1): + try: + return await operation() + except (ClosingError, ConnectionError) as e: # noqa: PERF203 + last_error = e + if attempt >= max_retries: + _logger.error(f"Operation failed after {max_retries} retries: {e}") + raise + + _logger.debug( + f"Connection error on attempt {attempt + 1}/{max_retries + 1}, " + f"retrying in {delay}s: {e}" + ) + await asyncio.sleep(delay) + delay *= 2 # Exponential backoff + + # Should never reach here, but satisfy type checker + if last_error: + raise last_error + raise RuntimeError("Retry operation failed unexpectedly") + + async def __aenter__(self) -> ValkeyStorage: + """Async context manager entry.""" + await self._get_client() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: Any, + ) -> None: + """Async context manager exit.""" + if self._client: + await self._client.close() + self._client = None + + def __del__(self) -> None: + """Cleanup client connection on deletion.""" + if self._client: + try: + bg_loop = type(self)._bg_loop + if bg_loop is not None and bg_loop.is_running(): + # Schedule close on the background loop — more reliable than + # create_task which can be GC'd before it runs. 
+ asyncio.run_coroutine_threadsafe(self._client.close(), bg_loop) + else: + close_result = self._client.close() + if asyncio.iscoroutine(close_result): + asyncio.run(close_result) + except Exception as e: + _logger.debug(f"Error closing client during cleanup: {e}") + + def _embedding_to_bytes(self, embedding: list[float]) -> bytes: + """Convert embedding list to binary format for Valkey Search. + + Args: + embedding: List of floats representing the embedding vector. + + Returns: + Binary representation as float32 array. + """ + return np.array(embedding, dtype=np.float32).tobytes() + + def _bytes_to_embedding(self, data: bytes) -> list[float]: + """Convert binary format back to embedding list. + + Args: + data: Binary data from Valkey. + + Returns: + List of floats representing the embedding vector. + """ + arr = np.frombuffer(data, dtype=np.float32) + return [float(x) for x in arr] + + def _record_to_dict(self, record: MemoryRecord) -> dict[str, str | bytes]: + """Convert MemoryRecord to Valkey hash fields. + + Args: + record: Memory record to serialize. + + Returns: + Dictionary of field names to string/bytes values. + + Raises: + ValueError: If serialization fails for any field. 
+ """ + try: + result: dict[str, str | bytes] = { + "id": record.id, + "content": record.content, + "scope": record.scope, + "categories": ",".join(record.categories) + if record.categories + else "", # TAG field format + "metadata": json.dumps(record.metadata), + "importance": str(record.importance), + "created_at": record.created_at.isoformat(), + "last_accessed": record.last_accessed.isoformat(), + "source": record.source or "", + "private": "true" if record.private else "false", + } + + # Add embedding as binary vector field if present + if record.embedding: + result["embedding"] = self._embedding_to_bytes(record.embedding) + else: + result["embedding"] = b"" # Empty bytes for no embedding + + return result + except (TypeError, ValueError) as e: + raise ValueError(f"Failed to serialize record {record.id}: {e}") from e + + def _dict_to_record( + self, data: dict[str, Any] | dict[bytes, bytes] + ) -> MemoryRecord | None: + """Convert Valkey hash fields to MemoryRecord. + + Args: + data: Dictionary of field names to values from Valkey (may be bytes or str keys/values). + + Returns: + Reconstructed MemoryRecord, or None if deserialization fails. 
+ """ + try: + # Convert bytes keys/values to strings if needed + str_data: dict[str, Any] = {} + for key, value in data.items(): + str_key = key.decode("utf-8") if isinstance(key, bytes) else key + + # Handle value conversion - keep embedding as bytes + if isinstance(value, bytes): + if str_key == "embedding": + # Keep embedding as bytes - don't try to decode + str_data[str_key] = value + else: + # Try to decode other fields as UTF-8 + try: + str_data[str_key] = value.decode("utf-8") + except UnicodeDecodeError: + # Keep as bytes if decode fails + str_data[str_key] = value + else: + str_data[str_key] = value + + # Deserialize embedding if present + embedding: list[float] | None = None + embedding_data = str_data.get("embedding") + if embedding_data: + if isinstance(embedding_data, bytes): + if len(embedding_data) > 0: + embedding = self._bytes_to_embedding(embedding_data) + # else: empty bytes, leave embedding as None + elif isinstance(embedding_data, str) and embedding_data: + # Fallback for string representation + try: + embedding = json.loads(embedding_data) + except json.JSONDecodeError: + # Invalid JSON, leave as None + pass + + # Parse categories - handle both TAG format (comma-separated) and JSON format + categories_str = str_data.get("categories", "") + if categories_str: + if categories_str.startswith("["): + # JSON format (legacy) + categories = json.loads(categories_str) + else: + # TAG format (comma-separated) + categories = [ + c.strip() for c in categories_str.split(",") if c.strip() + ] + else: + categories = [] + + return MemoryRecord( + id=str_data["id"], + content=str_data["content"], + scope=str_data["scope"], + categories=categories, + metadata=json.loads(str_data["metadata"]), + importance=float(str_data["importance"]), + created_at=datetime.fromisoformat(str_data["created_at"]), + last_accessed=datetime.fromisoformat(str_data["last_accessed"]), + embedding=embedding, + source=str_data.get("source") or None, + private=str_data.get("private", 
"false").lower() == "true", + ) + except (KeyError, ValueError, TypeError) as e: + # Try to get ID from data for error logging + record_id = "unknown" + try: + if data: + # Try both bytes and str keys + id_value = data.get(b"id") if b"id" in data else data.get("id") # type: ignore[call-overload] + if id_value: + record_id = ( + id_value.decode("utf-8") + if isinstance(id_value, bytes) + else str(id_value) + ) + except Exception as id_error: + _logger.debug( + f"Could not extract record ID for error logging: {id_error}" + ) + _logger.error(f"Failed to deserialize record {record_id}: {e}") + return None + + async def _ensure_vector_index(self) -> None: + """Create Valkey Search vector index if it doesn't exist. + + Creates an index named 'memory_index' on record:* hashes with: + - Vector field for embeddings (HNSW or FLAT algorithm) + - TAG fields for scope and categories + - NUMERIC fields for created_at and importance + + Raises: + RuntimeError: If Valkey Search module is not available. + """ + if self._index_created: + return + + client = await self._get_client() + + try: + # Check if index exists using FT.INFO + cmd: list[str | bytes] = ["FT.INFO", "memory_index"] + await client.custom_command(cmd) + _logger.debug("Vector index 'memory_index' already exists") + self._index_created = True + return + except Exception as e: + # Index doesn't exist, create it + _logger.debug(f"Index does not exist, will create: {e}") + + try: + # Build FT.CREATE command + # FT.CREATE memory_index ON HASH PREFIX 1 record: SCHEMA ... 
+ + # Build vector field parameters + vector_params = [ + "TYPE", + "FLOAT32", + "DIM", + str(self._vector_dim), + "DISTANCE_METRIC", + "COSINE", + ] + + # Add HNSW-specific parameters + if self._index_algorithm == "HNSW": + vector_params.extend(["M", "16", "EF_CONSTRUCTION", "200"]) + + # Count of vector field parameters (must be accurate) + param_count = str(len(vector_params)) + + create_cmd: list[str | bytes] = [ + "FT.CREATE", + "memory_index", + "ON", + "HASH", + "PREFIX", + "1", + "record:", + "SCHEMA", + "embedding", + "VECTOR", + self._index_algorithm, + param_count, # Number of vector field parameters + ] + + # Add the vector parameters + create_cmd.extend(vector_params) + + # Add TAG and NUMERIC fields + # Note: Each field definition is: field_name field_type [options] + create_cmd.extend( + [ + "scope", + "TAG", + "categories", + "TAG", + "SEPARATOR", + ",", + "created_at", + "NUMERIC", + "importance", + "NUMERIC", + ] + ) + + # Execute FT.CREATE + create_cmd_list: list[str | bytes] = list(create_cmd) + await client.custom_command(create_cmd_list) + _logger.info( + f"Created vector index 'memory_index' with {self._index_algorithm} algorithm (dim={self._vector_dim})" + ) + self._index_created = True + + except Exception as e: + error_msg = str(e).lower() + if "unknown command" in error_msg or "ft.create" in error_msg: + raise RuntimeError( + "Valkey Search module is not available. " + "Please ensure Valkey is running with the Search module loaded. " + "Use 'valkey/valkey-bundle:latest' Docker image or install the module separately." + ) from e + raise RuntimeError(f"Failed to create vector index: {e}") from e + + async def _update_indexes( + self, + record_id: str, + scope: str, + categories: list[str], + metadata: dict[str, Any], + timestamp: float, + ) -> None: + """Update all index structures for a record. 
+ + Adds record ID to: + - Scope sorted set with timestamp score + - Category sets for all categories + - Metadata index sets for all metadata key-value pairs + + Args: + record_id: Unique identifier of the record. + scope: Hierarchical scope path (e.g., "/agent/task"). + categories: List of category names. + metadata: Dictionary of metadata key-value pairs. + timestamp: Unix timestamp for scope index score. + """ + client = await self._get_client() + + # Update scope index (sorted set with timestamp score) + # Handle root scope "/" as special case + scope_key = f"scope:{scope}" + await client.zadd(scope_key, {record_id: timestamp}) + + # Update category indexes (sets) + for category in categories: + category_key = f"category:{category}" + await client.sadd(category_key, [record_id]) + + # Update metadata indexes (sets for each key-value pair) + for key, value in metadata.items(): + # Convert value to string for consistent key naming + value_str = str(value) + metadata_key = f"metadata:{key}:{value_str}" + await client.sadd(metadata_key, [record_id]) + + async def _remove_from_indexes( + self, + record_id: str, + scope: str, + categories: list[str], + metadata: dict[str, Any], + ) -> None: + """Remove record from all index structures. + + Removes record ID from: + - Scope sorted set + - All category sets + - All metadata index sets + + Args: + record_id: Unique identifier of the record. + scope: Hierarchical scope path. + categories: List of category names. + metadata: Dictionary of metadata key-value pairs. 
+ """ + client = await self._get_client() + + # Remove from scope index + scope_key = f"scope:{scope}" + await client.zrem(scope_key, [record_id]) + + # Remove from category indexes + for category in categories: + category_key = f"category:{category}" + await client.srem(category_key, [record_id]) + + # Remove from metadata indexes + for key, value in metadata.items(): + value_str = str(value) + metadata_key = f"metadata:{key}:{value_str}" + await client.srem(metadata_key, [record_id]) + + async def asave(self, records: list[MemoryRecord]) -> None: + """Save multiple records as a batch. + + Stores record fields in hash structure with key pattern "record:{id}". + Stores embedding as binary vector field in record hash for Valkey Search auto-indexing. + Updates scope sorted set, category sets, and metadata index sets. + + Note: + Operations are issued as individual commands, not wrapped in + MULTI/EXEC. Partial failures are possible under network errors. + + Args: + records: List of memory records to save. + + Raises: + ValueError: If serialization fails for any record. + RuntimeError: If Valkey connection fails. + """ + if not records: + return + + client = await self._get_client() + + # Ensure vector index exists before saving + await self._ensure_vector_index() + + # Build commands for atomic batch execution + for record in records: + record_key = f"record:{record.id}" + + # Convert record to hash fields (includes embedding as bytes) + record_dict = self._record_to_dict(record) + + # Store record hash (Valkey Search will auto-index it) + # HSET record:{id} field1 value1 field2 value2 ... 
+            hset_cmd: list[str | bytes] = ["HSET"]
+            hset_cmd.append(record_key)
+            for field, value in record_dict.items():
+                hset_cmd.append(field)
+                hset_cmd.append(value)
+            await client.custom_command(hset_cmd)
+
+            # Update all index structures
+            timestamp = record.created_at.timestamp()
+            await self._update_indexes(
+                record.id,
+                record.scope,
+                record.categories,
+                record.metadata,
+                timestamp,
+            )
+
+    def save(self, records: list[MemoryRecord]) -> None:
+        """Save multiple records as a batch (sync wrapper).
+
+        Note:
+            Not atomic — commands are issued individually (see asave), so
+            partial failures are possible under network errors.
+
+        Args:
+            records: List of memory records to save.
+
+        Raises:
+            ValueError: If serialization fails for any record.
+            RuntimeError: If Valkey connection fails or called from async context.
+        """
+        self._run_async(self.asave(records))
+
+    def get_record(self, record_id: str) -> MemoryRecord | None:
+        """Retrieve record by ID.
+
+        Fetches record hash from "record:{id}" key and deserializes all fields
+        including datetime, JSON, and boolean values.
+
+        Args:
+            record_id: Unique identifier of the record to retrieve.
+
+        Returns:
+            MemoryRecord if found, None if record doesn't exist or deserialization fails.
+        """
+        result: MemoryRecord | None = self._run_async(self._aget_record(record_id))
+        return result
+
+    async def _aget_record(self, record_id: str) -> MemoryRecord | None:
+        """Retrieve record by ID (async implementation).
+
+        Args:
+            record_id: Unique identifier of the record to retrieve.
+
+        Returns:
+            MemoryRecord if found, None if record doesn't exist or deserialization fails.
+        
+ """ + client = await self._get_client() + record_key = f"record:{record_id}" + + try: + # Fetch all fields from record hash + data = await client.hgetall(record_key) + + if not data: + # Record doesn't exist + return None + + # Deserialize to MemoryRecord + return self._dict_to_record(data) + + except Exception as e: + _logger.error(f"Error retrieving record {record_id}: {e}") + return None + + def update(self, record: MemoryRecord) -> None: + """Update existing record or create new one. + + Preserves created_at timestamp from original record if it exists. + Updates last_accessed timestamp to current time. + Removes record from old indexes and adds to new indexes atomically. + + Args: + record: Memory record to update. + + Raises: + ValueError: If serialization fails. + RuntimeError: If Valkey connection fails or called from async context. + """ + self._run_async(self._aupdate(record)) + + async def _aupdate(self, record: MemoryRecord) -> None: + """Update existing record or create new one (async implementation). + + Args: + record: Memory record to update. 
+ """ + client = await self._get_client() + record_key = f"record:{record.id}" + + # Fetch existing record to preserve created_at and get old index values + existing_data = await client.hgetall(record_key) + + if existing_data: + # Convert bytes to strings for parsing (skip embedding which is binary) + str_data: dict[str, str] = {} + for key, value in existing_data.items(): + str_key = key.decode("utf-8") if isinstance(key, bytes) else key + # Skip embedding field - it's binary data, not UTF-8 + if str_key == "embedding": + continue + # Handle other binary fields gracefully + if isinstance(value, bytes): + try: + str_value = value.decode("utf-8") + except UnicodeDecodeError: + continue # Skip fields that can't be decoded + else: + str_value = value + str_data[str_key] = str_value + + # Preserve created_at from existing record + try: + original_created_at = datetime.fromisoformat(str_data["created_at"]) + record.created_at = original_created_at + except (KeyError, ValueError) as e: + _logger.warning( + f"Could not preserve created_at for record {record.id}: {e}" + ) + + # Update last_accessed to current time + record.last_accessed = datetime.now() + + # Parse old values for index cleanup + try: + old_scope = str_data.get("scope", "") + # Handle both TAG format (comma-separated) and JSON format (legacy) + categories_str = str_data.get("categories", "") + if categories_str.startswith("["): + old_categories = json.loads(categories_str) + else: + old_categories = [ + c.strip() for c in categories_str.split(",") if c.strip() + ] + old_metadata = json.loads(str_data.get("metadata", "{}")) + except (json.JSONDecodeError, ValueError) as e: + _logger.warning( + f"Could not parse old index values for record {record.id}: {e}" + ) + old_scope = "" + old_categories = [] + old_metadata = {} + + # Remove from old indexes + await self._remove_from_indexes( + record.id, old_scope, old_categories, old_metadata + ) + + # Convert record to hash fields + record_dict = 
self._record_to_dict(record) + + # Store updated record hash + hset_cmd: list[str | bytes] = ["HSET", record_key] + for field, value in record_dict.items(): # type: ignore[assignment] + hset_cmd.append(field) + hset_cmd.append(value) + await client.custom_command(hset_cmd) + + # Add to new indexes + timestamp = record.created_at.timestamp() + await self._update_indexes( + record.id, record.scope, record.categories, record.metadata, timestamp + ) + + async def adelete( + self, + scope_prefix: str | None = None, + categories: list[str] | None = None, + record_ids: list[str] | None = None, + older_than: datetime | None = None, + metadata_filter: dict[str, Any] | None = None, + ) -> int: + """Delete records matching criteria. + + Supports deletion by record_ids, scope_prefix, categories, older_than, metadata_filter. + Multiple criteria are combined with AND logic. + + Note: + Operations are issued as individual commands, not wrapped in + MULTI/EXEC. Partial failures are possible under network errors. + + Args: + scope_prefix: Delete records in scope and subscopes. + categories: Delete records matching any of these categories. + record_ids: List of specific record IDs to delete. + older_than: Delete records created before this datetime. + metadata_filter: Delete records matching metadata key-value pairs. + + Returns: + Count of deleted records. + + Raises: + RuntimeError: If Valkey connection fails. 
+ """ + client = await self._get_client() + + # Step 1: Identify records to delete based on criteria + ids_to_delete: set[str] = set() + + # Filter by record_ids + if record_ids: + ids_to_delete.update(record_ids) + + # Filter by scope_prefix + if scope_prefix is not None: + scope_ids = await self._find_records_by_scope(scope_prefix) + if ids_to_delete: + ids_to_delete &= set(scope_ids) # AND logic + else: + ids_to_delete.update(scope_ids) + + # Filter by categories + if categories: + category_ids = await self._find_records_by_categories(categories) + if ids_to_delete: + ids_to_delete &= set(category_ids) # AND logic + else: + ids_to_delete.update(category_ids) + + # Filter by older_than + if older_than is not None: + old_ids = await self._find_records_older_than(older_than) + if ids_to_delete: + ids_to_delete &= set(old_ids) # AND logic + else: + ids_to_delete.update(old_ids) + + # Filter by metadata + if metadata_filter: + metadata_ids = await self._find_records_by_metadata(metadata_filter) + if ids_to_delete: + ids_to_delete &= set(metadata_ids) # AND logic + else: + ids_to_delete.update(metadata_ids) + + # If no criteria specified, delete nothing + if not ids_to_delete: + return 0 + + # Step 2: Fetch record data to identify which indexes to clean + records_data = await self._fetch_records_for_deletion(list(ids_to_delete)) + + # Step 3: Delete records and clean indexes + for record_id, data in records_data.items(): + record_key = f"record:{record_id}" + + # Delete record hash (Valkey Search auto-removes from vector index) + await client.delete([record_key]) + + # Remove from all index structures + await self._remove_from_indexes( + record_id, data["scope"], data["categories"], data["metadata"] + ) + + return len(records_data) + + def delete( + self, + scope_prefix: str | None = None, + categories: list[str] | None = None, + record_ids: list[str] | None = None, + older_than: datetime | None = None, + metadata_filter: dict[str, Any] | None = None, + ) -> int: + 
"""Delete records matching criteria (sync wrapper). + + Args: + scope_prefix: Delete records in scope and subscopes. + categories: Delete records matching any of these categories. + record_ids: List of specific record IDs to delete. + older_than: Delete records created before this datetime. + metadata_filter: Delete records matching metadata key-value pairs. + + Returns: + Count of deleted records. + + Raises: + RuntimeError: If Valkey connection fails or called from async context. + """ + result: int = self._run_async( + self.adelete( + scope_prefix=scope_prefix, + categories=categories, + record_ids=record_ids, + older_than=older_than, + metadata_filter=metadata_filter, + ) + ) + return result + + async def _find_records_by_scope(self, scope_prefix: str) -> list[str]: + """Find all record IDs in scope and subscopes. + + Args: + scope_prefix: Scope path prefix to match. + + Returns: + List of record IDs in matching scopes. + """ + client = await self._get_client() + record_ids: set[str] = set() + + # Scan for all scope keys + cursor: str | bytes = "0" + while True: + result = await client.scan(cursor, match="scope:*", count=1000) + cursor_new: str | bytes = result[0] # type: ignore[assignment] + keys: list[bytes] = result[1] # type: ignore[assignment] + + for key_bytes in keys: + # Extract scope path from key + key_str = ( + key_bytes.decode("utf-8") + if isinstance(key_bytes, bytes) + else key_bytes + ) + scope_path = key_str.split(":", 1)[1] if ":" in key_str else "" + + # Check if scope matches prefix + if scope_path.startswith(scope_prefix): + # Get all record IDs in this scope using custom command + # ZRANGE key 0 -1 + key_for_cmd = ( + key_bytes + if isinstance(key_bytes, bytes) + else key_bytes.encode("utf-8") + ) + members_result = await client.custom_command( + [b"ZRANGE", key_for_cmd, b"0", b"-1"] + ) + # Convert result to list if needed + if isinstance(members_result, list): + # Convert bytes to strings + str_members = [ + m.decode("utf-8") if 
isinstance(m, bytes) else str(m) + for m in members_result + ] + record_ids.update(str_members) + + # Check if cursor is 0 (scan complete) + cursor_str = ( + cursor_new.decode("utf-8") + if isinstance(cursor_new, bytes) + else cursor_new + ) + if cursor_str == "0": + break + cursor = cursor_new + + return list(record_ids) + + async def _find_records_by_categories(self, categories: list[str]) -> list[str]: + """Find all record IDs matching any of the categories. + + Args: + categories: List of category names. + + Returns: + List of record IDs with any of the categories. + """ + client = await self._get_client() + record_ids: set[str] = set() + + for category in categories: + category_key = f"category:{category}" + members = await client.smembers(category_key) + # Convert bytes to strings + str_members = [ + m.decode("utf-8") if isinstance(m, bytes) else m for m in members + ] + record_ids.update(str_members) + + return list(record_ids) + + async def _find_records_older_than(self, older_than: datetime) -> list[str]: + """Find all record IDs created before the specified datetime. + + Args: + older_than: Datetime threshold. + + Returns: + List of record IDs created before older_than. 
+ """ + client = await self._get_client() + record_ids: set[str] = set() + threshold = older_than.timestamp() + + # Scan all scope keys and filter by timestamp + cursor: str | bytes = "0" + while True: + result = await client.scan(cursor, match="scope:*", count=1000) + cursor_new: str | bytes = result[0] # type: ignore[assignment] + keys: list[bytes] = result[1] # type: ignore[assignment] + + for key_bytes in keys: + # Get records with score (timestamp) less than threshold using custom command + # ZRANGEBYSCORE key 0 threshold + key_for_cmd = ( + key_bytes + if isinstance(key_bytes, bytes) + else key_bytes.encode("utf-8") + ) + threshold_bytes = str(threshold).encode("utf-8") + members_result = await client.custom_command( + [b"ZRANGEBYSCORE", key_for_cmd, b"0", threshold_bytes] + ) + # Convert result to list if needed + if isinstance(members_result, list): + # Convert bytes to strings + str_members = [ + m.decode("utf-8") if isinstance(m, bytes) else str(m) + for m in members_result + ] + record_ids.update(str_members) + + # Check if cursor is 0 (scan complete) + cursor_str = ( + cursor_new.decode("utf-8") + if isinstance(cursor_new, bytes) + else cursor_new + ) + if cursor_str == "0": + break + cursor = cursor_new + + return list(record_ids) + + async def _find_records_by_metadata( + self, metadata_filter: dict[str, Any] + ) -> list[str]: + """Find all record IDs matching all metadata criteria (AND logic). + + Args: + metadata_filter: Dictionary of metadata key-value pairs. + + Returns: + List of record IDs matching all metadata criteria. 
+ """ + client = await self._get_client() + + # Get record IDs for each metadata criterion + metadata_sets: list[set[str]] = [] + for key, value in metadata_filter.items(): + value_str = str(value) + metadata_key = f"metadata:{key}:{value_str}" + members = await client.smembers(metadata_key) + # Convert bytes to strings + str_members = { + m.decode("utf-8") if isinstance(m, bytes) else m for m in members + } + metadata_sets.append(str_members) + + # Compute intersection (AND logic) + if not metadata_sets: + return [] + + result = metadata_sets[0] + for s in metadata_sets[1:]: + result &= s + + return list(result) + + async def _fetch_records_for_deletion( + self, record_ids: list[str] + ) -> dict[str, dict[str, Any]]: + """Fetch record data needed for index cleanup. + + Args: + record_ids: List of record IDs to fetch. + + Returns: + Dictionary mapping record ID to parsed record data. + """ + client = await self._get_client() + records_data: dict[str, dict[str, Any]] = {} + + for record_id in record_ids: + record_key = f"record:{record_id}" + data = await client.hgetall(record_key) + + if data: + # Convert bytes to strings (skip embedding which is binary) + str_data: dict[str, str] = {} + for key, value in data.items(): + str_key = key.decode("utf-8") if isinstance(key, bytes) else key + # Skip embedding field - it's binary + if str_key == "embedding": + continue + # Handle other binary fields gracefully + if isinstance(value, bytes): + try: + str_value = value.decode("utf-8") + except UnicodeDecodeError: + continue # Skip fields that can't be decoded + else: + str_value = value + str_data[str_key] = str_value + + # Parse categories and metadata for index cleanup + try: + # Parse categories — handle both TAG (comma-separated) and JSON format + categories_str = str_data.get("categories", "") + if categories_str and categories_str.startswith("["): + categories = json.loads(categories_str) + elif categories_str: + categories = [ + c.strip() for c in 
categories_str.split(",") if c.strip() + ] + else: + categories = [] + + parsed_data = { + "scope": str_data.get("scope", ""), + "categories": categories, + "metadata": json.loads(str_data.get("metadata", "{}")) + if str_data.get("metadata") + else {}, + } + records_data[record_id] = parsed_data + except (json.JSONDecodeError, ValueError) as e: + _logger.warning( + f"Could not parse record {record_id} for deletion: {e}" + ) + # Still delete the record, just skip index cleanup + records_data[record_id] = { + "scope": "", + "categories": [], + "metadata": {}, + } + + return records_data + + async def _vector_search( + self, + query_embedding: list[float], + scope_prefix: str | None = None, + categories: list[str] | None = None, + metadata_filter: dict[str, Any] | None = None, + limit: int = 10, + min_score: float = 0.0, + ) -> list[tuple[MemoryRecord, float]]: + """Perform server-side vector search using Valkey Search. + + Uses FT.SEARCH command with KNN query for vector similarity. + Applies filters for scope, categories, and metadata in the same query. + + Args: + query_embedding: Embedding vector for the query. + scope_prefix: Optional scope path prefix to filter results. + categories: Optional list of categories (OR logic). + metadata_filter: Optional metadata key-value pairs (AND logic). + limit: Maximum number of results to return. + min_score: Minimum similarity score threshold (0.0 to 1.0). + + Returns: + List of (MemoryRecord, score) tuples ordered by descending score. + + Raises: + RuntimeError: If Valkey Search module is not available. 
+ """ + client = await self._get_client() + + # Ensure vector index exists + await self._ensure_vector_index() + + # Build query components + query_parts: list[str] = [] + + # Scope prefix filter + # Format: @scope:{prefix*} + if scope_prefix: + # Escape special characters in scope prefix + escaped_scope = self._escape_search_query(scope_prefix) + # For root scope "/", match everything + if scope_prefix == "/": + query_parts.append("*") + else: + query_parts.append(f"@scope:{{{escaped_scope}*}}") + + # Category filter (OR logic) + # Format: @categories:{cat1|cat2|cat3} + if categories: + # Escape each category and join with | + escaped_categories = [self._escape_search_query(cat) for cat in categories] + cat_query = "|".join(escaped_categories) + query_parts.append(f"@categories:{{{cat_query}}}") + + # Metadata filters (AND logic) + # Format: @{key}:{value} + if metadata_filter: + for key, value in metadata_filter.items(): + # Escape key and value + escaped_key = self._escape_search_query(key) + escaped_value = self._escape_search_query(str(value)) + query_parts.append(f"@{escaped_key}:{{{escaped_value}}}") + + # Combine filters + filter_query = " ".join(query_parts) if query_parts else "*" + + # Build KNN query with filters + # Format: (filter)=>[KNN limit @field $BLOB AS score] + # Note: Don't wrap single "*" in parentheses + if filter_query == "*": + query = f"{filter_query}=>[KNN {limit} @embedding $BLOB AS score]" + else: + query = f"({filter_query})=>[KNN {limit} @embedding $BLOB AS score]" + + # Prepare embedding blob for PARAMS + embedding_blob = self._embedding_to_bytes(query_embedding) + + # Build FT.SEARCH command + # FT.SEARCH index query [PARAMS nargs name value ...] [RETURN count field ...] 
[LIMIT offset num] + # Note: Vector search results are automatically sorted by score (descending) + # so we don't need an explicit SORTBY clause + search_cmd: list[str | bytes] = [ + "FT.SEARCH", + "memory_index", + query, + "PARAMS", + "2", + "BLOB", + embedding_blob, + "RETURN", + "11", # Increased from 10 to include score + "id", + "content", + "scope", + "categories", + "metadata", + "importance", + "created_at", + "last_accessed", + "source", + "private", + "score", # Add score field + "LIMIT", + "0", + str(limit), + ] + + try: + # Execute FT.SEARCH + result = await client.custom_command(search_cmd) + + # Parse results + # Result format can be either: + # 1. Flat list: [total_count, doc1_key, [field1, value1, ...], doc2_key, ...] + # 2. Dict format: [total_count, {doc1_key: {field: value, ...}, doc2_key: {...}}] + if not result or not isinstance(result, list) or len(result) < 1: + return [] + + # First element is total count + total_count_raw = result[0] + if isinstance(total_count_raw, (int, str)): + total_count = int(total_count_raw) if total_count_raw else 0 + else: + total_count = 0 + if total_count == 0: + return [] + + # Parse documents - check format + records: list[tuple[MemoryRecord, float]] = [] + + # Check if result[1] is a dict (new format) or a key (old format) + if len(result) > 1 and isinstance(result[1], dict): + # New dictionary format: [count, {key1: {fields...}, key2: {fields...}}] + docs_dict = result[1] + for doc_fields in docs_dict.values(): + field_dict = self._normalize_field_dict(doc_fields) + parsed = self._parse_search_result(field_dict, min_score) + if parsed is not None: + records.append(parsed) + else: + # Old flat list format: [count, key1, [fields...], key2, [fields...]] + i = 1 # Start after total count + while i < len(result): + if i + 1 >= len(result): + break + + doc_fields = result[i + 1] + if not isinstance(doc_fields, list): + i += 2 + continue + + # Convert flat [field, value, field, value, ...] 
to dict + raw: dict[Any, Any] = {} + for j in range(0, len(doc_fields), 2): + if j + 1 < len(doc_fields): + raw[doc_fields[j]] = doc_fields[j + 1] + + field_dict = self._normalize_field_dict(raw) + parsed = self._parse_search_result(field_dict, min_score) + if parsed is not None: + records.append(parsed) + + i += 2 + + # Sort by score descending (should already be sorted, but ensure) + records.sort(key=lambda x: x[1], reverse=True) + + return records + + except Exception as e: + error_msg = str(e).lower() + if "unknown command" in error_msg or "ft.search" in error_msg: + raise RuntimeError( + "Valkey Search module is not available. " + "Please ensure Valkey is running with the Search module loaded." + ) from e + _logger.error(f"Vector search failed: {e}") + raise + + # ------------------------------------------------------------------ + # Search result parsing helpers + # ------------------------------------------------------------------ + + @staticmethod + def _normalize_field_dict(raw: dict[Any, Any]) -> dict[str, Any]: + """Convert a raw field dict (possibly bytes keys/values) to str keys. + + Embedding values are kept as bytes; all other bytes values are decoded + to UTF-8 (falling back to raw bytes on decode errors). + """ + out: dict[str, Any] = {} + for key, value in raw.items(): + str_key = key.decode("utf-8") if isinstance(key, bytes) else str(key) + if isinstance(value, bytes): + if str_key == "embedding": + out[str_key] = value + else: + try: + out[str_key] = value.decode("utf-8") + except UnicodeDecodeError: + out[str_key] = value + else: + out[str_key] = value + return out + + def _parse_search_result( + self, + field_dict: dict[str, Any], + min_score: float, + ) -> tuple[MemoryRecord, float] | None: + """Extract score, apply min_score filter, and deserialize a search hit. + + Score is converted from cosine distance ([0, 2]) to similarity ([0, 1]) + and clamped to that range. 
+ + Returns: + (MemoryRecord, score) or None if filtered out or deserialization fails. + """ + # Extract score — Valkey Search returns cosine distance + score = 0.0 + for score_key in ("__score", "score"): + if score_key in field_dict: + distance = float(field_dict[score_key]) + score = max(0.0, min(1.0, 1.0 - (distance / 2.0))) + break + + if score < min_score: + return None + + record = self._dict_to_record(field_dict) + if record is None: + return None + return (record, score) + + def _escape_search_query(self, text: str) -> str: + """Escape special characters in Valkey Search query. + + Valkey Search uses special characters: , . < > { } [ ] " ' : ; ! @ # $ % ^ & * ( ) - + = ~ | + + Args: + text: Text to escape. + + Returns: + Escaped text safe for use in search queries. + """ + # Characters that need escaping in Valkey Search queries + special_chars = r",.<>{}[]\"':;!@#$%^&*()-+=~|" + for char in special_chars: + text = text.replace(char, f"\\{char}") + return text + + async def asearch( + self, + query_embedding: list[float], + scope_prefix: str | None = None, + categories: list[str] | None = None, + metadata_filter: dict[str, Any] | None = None, + limit: int = 10, + min_score: float = 0.0, + ) -> list[tuple[MemoryRecord, float]]: + """Search for memories by vector similarity (async). + + Uses Valkey Search module for server-side vector similarity computation. + Applies filters for scope, categories, and metadata in the same query. + + Args: + query_embedding: Embedding vector for the query. + scope_prefix: Optional scope path prefix to filter results. + categories: Optional list of categories (OR logic). + metadata_filter: Optional metadata key-value pairs (AND logic). + limit: Maximum number of results to return. + min_score: Minimum similarity score threshold (0.0 to 1.0). + + Returns: + List of (MemoryRecord, score) tuples ordered by relevance (descending score). + + Raises: + RuntimeError: If Valkey Search module is not available. 
+ """ + return await self._vector_search( + query_embedding, + scope_prefix, + categories, + metadata_filter, + limit, + min_score, + ) + + def search( + self, + query_embedding: list[float], + scope_prefix: str | None = None, + categories: list[str] | None = None, + metadata_filter: dict[str, Any] | None = None, + limit: int = 10, + min_score: float = 0.0, + ) -> list[tuple[MemoryRecord, float]]: + """Search for memories by vector similarity (sync wrapper). + + Uses Valkey Search module for server-side vector similarity computation. + Applies filters for scope, categories, and metadata in the same query. + + Args: + query_embedding: Embedding vector for the query. + scope_prefix: Optional scope path prefix to filter results. + categories: Optional list of categories (OR logic). + metadata_filter: Optional metadata key-value pairs (AND logic). + limit: Maximum number of results to return. + min_score: Minimum similarity score threshold (0.0 to 1.0). + + Returns: + List of (MemoryRecord, score) tuples ordered by relevance (descending score). + + Raises: + RuntimeError: If Valkey Search module is not available or called from async context. + """ + result: list[tuple[MemoryRecord, float]] = self._run_async( + self.asearch( + query_embedding, + scope_prefix, + categories, + metadata_filter, + limit, + min_score, + ) + ) + return result + + def list_records( + self, + scope_prefix: str | None = None, + limit: int = 200, + offset: int = 0, + ) -> list[MemoryRecord]: + """List records in a scope, newest first. + + Uses scope sorted set ZRANGE with REV flag for newest-first ordering. + Supports scope_prefix filtering and pagination via limit and offset. + + Args: + scope_prefix: Optional scope path prefix to filter by. + limit: Maximum number of records to return (default 200). + offset: Number of records to skip for pagination (default 0). + + Returns: + List of MemoryRecord, ordered by created_at descending (newest first). 
+ """ + result: list[MemoryRecord] = self._run_async( + self._alist_records(scope_prefix, limit, offset) + ) + return result + + async def _alist_records( + self, + scope_prefix: str | None = None, + limit: int = 200, + offset: int = 0, + ) -> list[MemoryRecord]: + """List records in a scope, newest first (async implementation). + + Args: + scope_prefix: Optional scope path prefix to filter by. + limit: Maximum number of records to return. + offset: Number of records to skip for pagination. + + Returns: + List of MemoryRecord, ordered by created_at descending. + """ + client = await self._get_client() + + # Find all record IDs in scope(s) + if scope_prefix is not None: + # Get records from matching scopes + record_ids = await self._find_records_by_scope(scope_prefix) + else: + # Get all records from all scopes + record_ids = [] + cursor: str | bytes = "0" + while True: + result = await client.scan(cursor, match="scope:*", count=1000) + cursor_new: str | bytes = result[0] # type: ignore[assignment] + keys: list[bytes] = result[1] # type: ignore[assignment] + + for key_bytes in keys: + # Get all record IDs in this scope + key_for_cmd = ( + key_bytes + if isinstance(key_bytes, bytes) + else key_bytes.encode("utf-8") + ) + members_result = await client.custom_command( + [b"ZRANGE", key_for_cmd, b"0", b"-1"] + ) + if isinstance(members_result, list): + str_members = [ + m.decode("utf-8") if isinstance(m, bytes) else str(m) + for m in members_result + ] + record_ids.extend(str_members) + + # Check if cursor is 0 (scan complete) + cursor_str = ( + cursor_new.decode("utf-8") + if isinstance(cursor_new, bytes) + else cursor_new + ) + if cursor_str == "0": + break + cursor = cursor_new + + # Fetch records and sort by created_at descending + records: list[MemoryRecord] = [] + for record_id in record_ids: + record = await self._aget_record(record_id) + if record: + records.append(record) + + # Sort by created_at descending (newest first) + records.sort(key=lambda r: 
r.created_at, reverse=True) + + # Apply pagination + return records[offset : offset + limit] + + def get_scope_info(self, scope: str) -> ScopeInfo: + """Get information about a scope. + + Counts records in scope and subscopes using sorted set cardinality. + Extracts categories used within scope. + Finds oldest and newest record timestamps. + Lists immediate child scope paths. + + Args: + scope: The scope path. + + Returns: + ScopeInfo with record count, categories, date range, child scopes. + """ + result: ScopeInfo = self._run_async(self._aget_scope_info(scope)) + return result + + async def _aget_scope_info(self, scope: str) -> ScopeInfo: + """Get information about a scope (async implementation). + + Args: + scope: The scope path. + + Returns: + ScopeInfo with record count, categories, date range, child scopes. + """ + # Normalize scope path + scope = scope.rstrip("/") or "/" + prefix = scope if scope != "/" else "" + + # Find all record IDs in scope and subscopes + record_ids = await self._find_records_by_scope(prefix or "/") + + if not record_ids: + return ScopeInfo( + path=scope, + record_count=0, + categories=[], + oldest_record=None, + newest_record=None, + child_scopes=[], + ) + + # Fetch records to extract categories and timestamps + categories_set: set[str] = set() + oldest: datetime | None = None + newest: datetime | None = None + + for record_id in record_ids: + record = await self._aget_record(record_id) + if record: + # Collect categories + categories_set.update(record.categories) + + # Track oldest and newest timestamps + if oldest is None or record.created_at < oldest: + oldest = record.created_at + if newest is None or record.created_at > newest: + newest = record.created_at + + # Find immediate child scopes + child_scopes = await self._alist_scopes(scope) + + return ScopeInfo( + path=scope, + record_count=len(record_ids), + categories=sorted(categories_set), + oldest_record=oldest, + newest_record=newest, + child_scopes=child_scopes, + ) + + def 
list_scopes(self, parent: str = "/") -> list[str]: + """List immediate child scopes under a parent path. + + Defaults to root scope "/" when no parent specified. + Parses scope paths from scope sorted set keys. + Returns only immediate children, not grandchildren. + + Args: + parent: Parent scope path (default root "/"). + + Returns: + List of immediate child scope paths in sorted order. + """ + result: list[str] = self._run_async(self._alist_scopes(parent)) + return result + + async def _alist_scopes(self, parent: str = "/") -> list[str]: + """List immediate child scopes under a parent path (async implementation). + + Args: + parent: Parent scope path (default root "/"). + + Returns: + List of immediate child scope paths in sorted order. + """ + client = await self._get_client() + + # Normalize parent path + parent = parent.rstrip("/") or "" + prefix = (parent + "/") if parent else "/" + + # Scan for all scope keys + children: set[str] = set() + cursor: str | bytes = "0" + while True: + result = await client.scan(cursor, match="scope:*", count=1000) + cursor_new: str | bytes = result[0] # type: ignore[assignment] + keys: list[bytes] = result[1] # type: ignore[assignment] + + for key_bytes in keys: + # Extract scope path from key + key_str = ( + key_bytes.decode("utf-8") + if isinstance(key_bytes, bytes) + else key_bytes + ) + scope_path = key_str.split(":", 1)[1] if ":" in key_str else "" + + # Check if scope is a child of parent + if scope_path.startswith(prefix) and scope_path != ( + prefix.rstrip("/") or "/" + ): + # Extract the immediate child component + rest = scope_path[len(prefix) :] + first_component = rest.split("/", 1)[0] + if first_component: + child_path = prefix + first_component + children.add(child_path) + + # Check if cursor is 0 (scan complete) + cursor_str = ( + cursor_new.decode("utf-8") + if isinstance(cursor_new, bytes) + else cursor_new + ) + if cursor_str == "0": + break + cursor = cursor_new + + return sorted(children) + + def 
list_categories(self, scope_prefix: str | None = None) -> dict[str, int]: + """List categories and their counts within a scope. + + Supports filtering by scope_prefix. + Computes counts by measuring category set cardinality. + Returns global category counts when scope_prefix is None. + + Args: + scope_prefix: Optional scope to limit to (None = global). + + Returns: + Mapping of category name to record count. + """ + result: dict[str, int] = self._run_async(self._alist_categories(scope_prefix)) + return result + + async def _alist_categories( + self, scope_prefix: str | None = None + ) -> dict[str, int]: + """List categories and their counts within a scope (async implementation). + + Args: + scope_prefix: Optional scope to limit to (None = global). + + Returns: + Mapping of category name to record count. + """ + client = await self._get_client() + + if scope_prefix is not None: + # Get records in scope and count their categories + record_ids = await self._find_records_by_scope(scope_prefix) + counts: dict[str, int] = {} + + for record_id in record_ids: + record = await self._aget_record(record_id) + if record: + for category in record.categories: + counts[category] = counts.get(category, 0) + 1 + + return counts + # Global category counts - scan all category sets + counts = {} + cursor: str | bytes = "0" + while True: + result = await client.scan(cursor, match="category:*", count=1000) + cursor_new: str | bytes = result[0] # type: ignore[assignment] + keys: list[bytes] = result[1] # type: ignore[assignment] + + for key_bytes in keys: + # Extract category name from key + key_str = ( + key_bytes.decode("utf-8") + if isinstance(key_bytes, bytes) + else key_bytes + ) + category_name = key_str.split(":", 1)[1] if ":" in key_str else "" + + if category_name: + # Get cardinality of category set + category_key = f"category:{category_name}" + count = await client.scard(category_key) + counts[category_name] = int(count) if count else 0 + + # Check if cursor is 0 (scan 
complete) + cursor_str = ( + cursor_new.decode("utf-8") + if isinstance(cursor_new, bytes) + else cursor_new + ) + if cursor_str == "0": + break + cursor = cursor_new + + return counts + + def count(self, scope_prefix: str | None = None) -> int: + """Count records in scope (and subscopes). + + Uses scope sorted set cardinality for efficient counting. + Supports scope_prefix filtering. + Returns total count across all scopes when scope_prefix is None. + + Args: + scope_prefix: Optional scope path (None = all). + + Returns: + Number of records. + """ + result: int = self._run_async(self._acount(scope_prefix)) + return result + + async def _acount(self, scope_prefix: str | None = None) -> int: + """Count records in scope (and subscopes) (async implementation). + + Args: + scope_prefix: Optional scope path (None = all). + + Returns: + Number of records. + """ + if scope_prefix is None or scope_prefix.strip("/") == "": + # Count all records across all scopes + record_ids = await self._find_records_by_scope("/") + return len(set(record_ids)) # Use set to deduplicate + # Count records in specific scope and subscopes + record_ids = await self._find_records_by_scope(scope_prefix) + return len(set(record_ids)) # Use set to deduplicate + + def reset(self, scope_prefix: str | None = None) -> None: + """Reset (delete all) memories in scope. + + Deletes all records in scope and subscopes when scope_prefix provided. + Deletes all records across all scopes when scope_prefix is None. + Removes all index structures atomically. + + Args: + scope_prefix: Optional scope path (None = reset all). + """ + self._run_async(self._areset(scope_prefix)) + + async def _areset(self, scope_prefix: str | None = None) -> None: + """Reset (delete all) memories in scope (async implementation). + + Args: + scope_prefix: Optional scope path (None = reset all). 
+ """ + # Use delete with scope_prefix to remove all records + await self.adelete(scope_prefix=scope_prefix) diff --git a/lib/crewai/src/crewai/memory/types.py b/lib/crewai/src/crewai/memory/types.py index e787b569d0..44f1939f80 100644 --- a/lib/crewai/src/crewai/memory/types.py +++ b/lib/crewai/src/crewai/memory/types.py @@ -2,13 +2,17 @@ from __future__ import annotations +import concurrent.futures from datetime import datetime +import logging from typing import Any from uuid import uuid4 -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, field_validator +_logger = logging.getLogger(__name__) + # When searching the vector store, we ask for more results than the caller # requested so that post-search steps (composite scoring, deduplication, # category filtering) have enough candidates to fill the final result set. @@ -57,6 +61,26 @@ class MemoryRecord(BaseModel): repr=False, description="Vector embedding for semantic search. Excluded from serialization to save tokens.", ) + + @field_validator("embedding", mode="before") + @classmethod + def validate_embedding(cls, v: Any) -> list[float] | None: + """Ensure embedding is always list[float] or None, never bytes.""" + if v is None: + return None + if isinstance(v, bytes): + # Convert bytes to list[float] if needed + import numpy as np + + if len(v) == 0: + return None + arr = np.frombuffer(v, dtype=np.float32) + return [float(x) for x in arr] + if isinstance(v, list): + return [float(x) for x in v] + # Fallback: assume it's already a valid list[float] + return v # type: ignore[no-any-return] + source: str | None = Field( default=None, description=( @@ -304,7 +328,11 @@ def embed_text(embedder: Any, text: str) -> list[float]: """ if not text or not text.strip(): return [] + + # Just call the embedder directly - the blocking issue needs to be fixed + # at a higher level (making Memory.recall() async) result = embedder([text]) + if not result: return [] first = result[0] @@ -315,6 +343,11 @@ 
def embed_text(embedder: Any, text: str) -> list[float]: return list(first) +# Reusable thread pool for running embedder calls from sync context +# when an async event loop is already running. +_EMBED_POOL = concurrent.futures.ThreadPoolExecutor(max_workers=1) + + def embed_texts(embedder: Any, texts: list[str]) -> list[list[float]]: """Embed multiple texts in a single API call. @@ -328,6 +361,8 @@ def embed_texts(embedder: Any, texts: list[str]) -> list[list[float]]: Returns: List of embeddings, one per input text. Empty texts produce empty lists. """ + import asyncio + if not texts: return [] # Filter out empty texts, remembering their positions @@ -337,7 +372,23 @@ def embed_texts(embedder: Any, texts: list[str]) -> list[list[float]]: if not valid: return [[] for _ in texts] - result = embedder([t for _, t in valid]) + # Check if we're in an async context + result: Any + try: + asyncio.get_running_loop() + # We're in an async context, but this is a sync function + # Run embedder in thread pool to avoid blocking the event loop + try: + result = _EMBED_POOL.submit(embedder, [t for _, t in valid]).result( + timeout=30 + ) + except concurrent.futures.TimeoutError: + _logger.warning("Embedder timed out after 30s, returning empty embeddings") + return [[] for _ in texts] + except RuntimeError: + # Not in async context, run directly + result = embedder([t for _, t in valid]) + embeddings: list[list[float]] = [[] for _ in texts] for (orig_idx, _), emb in zip(valid, result, strict=False): if hasattr(emb, "tolist"): diff --git a/lib/crewai/src/crewai/memory/unified_memory.py b/lib/crewai/src/crewai/memory/unified_memory.py index d879bace0c..93827bac18 100644 --- a/lib/crewai/src/crewai/memory/unified_memory.py +++ b/lib/crewai/src/crewai/memory/unified_memory.py @@ -5,6 +5,7 @@ from concurrent.futures import Future, ThreadPoolExecutor import contextvars from datetime import datetime +import logging import threading import time from typing import TYPE_CHECKING, Annotated, 
Any, Literal @@ -36,6 +37,9 @@ from crewai.rag.embeddings.providers.openai.types import OpenAIProviderSpec +_logger = logging.getLogger(__name__) + + if TYPE_CHECKING: from chromadb.utils.embedding_functions.openai_embedding_function import ( OpenAIEmbeddingFunction, @@ -211,6 +215,17 @@ def model_post_init(self, __context: Any) -> None: from crewai.memory.storage.lancedb_storage import LanceDBStorage self._storage = LanceDBStorage() + elif self.storage == "valkey": + from crewai.memory.storage.valkey_storage import ValkeyStorage + from crewai.utilities.cache_config import parse_cache_url + + conn = parse_cache_url() or {} + self._storage = ValkeyStorage( + host=conn.get("host", "localhost"), + port=conn.get("port", 6379), + db=conn.get("db", 0), + password=conn.get("password"), + ) else: from crewai.memory.storage.lancedb_storage import LanceDBStorage @@ -316,16 +331,60 @@ def _on_save_done(self, future: Future[Any]) -> None: except Exception: # noqa: S110 pass # swallow everything during shutdown - def drain_writes(self) -> None: + def drain_writes(self, timeout_per_save: float = 60.0) -> None: """Block until all pending background saves have completed. Called automatically by ``recall()`` and should be called by the crew at shutdown to ensure no saves are lost. + + Args: + timeout_per_save: Maximum seconds to wait per save operation. + Default 60s. If a save times out, logs warning + but continues to avoid blocking crew completion. 
""" with self._pending_lock: pending = list(self._pending_saves) - for future in pending: - future.result() # blocks until done; re-raises exceptions + + if pending: + _logger.debug( + "[DRAIN_WRITES] Waiting for %d pending saves...", len(pending) + ) + + failed_saves = 0 + for i, future in enumerate(pending): + try: + _logger.debug( + "[DRAIN_WRITES] Waiting for save %d/%d...", i + 1, len(pending) + ) + future.result(timeout=timeout_per_save) + _logger.debug( + "[DRAIN_WRITES] Save %d/%d completed", i + 1, len(pending) + ) + except TimeoutError: # noqa: PERF203 + failed_saves += 1 + _logger.warning( + "[DRAIN_WRITES] Save %d/%d timed out after %ss. " + "This save will be abandoned. Consider increasing timeout or checking " + "LLM/embedder performance.", + i + 1, + len(pending), + timeout_per_save, + ) + # Don't raise - just log and continue to avoid blocking crew completion + except Exception as e: + failed_saves += 1 + _logger.error( + "[DRAIN_WRITES] Save %d/%d failed: %s", i + 1, len(pending), e + ) + # Don't raise - just log and continue + + if failed_saves > 0: + _logger.warning( + "[DRAIN_WRITES] %d/%d saves failed or timed out. " + "Some memories may not have been persisted.", + failed_saves, + len(pending), + ) def close(self) -> None: """Drain pending saves, flush storage, and shut down the background thread pool.""" diff --git a/lib/crewai/src/crewai/tools/memory_tools.py b/lib/crewai/src/crewai/tools/memory_tools.py index e790c93f1e..bbff39337d 100644 --- a/lib/crewai/src/crewai/tools/memory_tools.py +++ b/lib/crewai/src/crewai/tools/memory_tools.py @@ -13,14 +13,18 @@ class RecallMemorySchema(BaseModel): """Schema for the recall memory tool.""" - queries: list[str] = Field( - ..., + queries: list[str] | None = Field( + default=None, description=( - "One or more search queries. Pass a single item for a focused search, " - "or multiple items to search for several things at once." + "REQUIRED: A list of search query strings. 
" + "Examples: ['AI trends'], ['Python', 'machine learning'], ['vector databases']. " + "Pass a single item for a focused search, or multiple items to search for several things at once." ), + min_length=1, ) + model_config = {"extra": "forbid"} + class RecallMemoryTool(BaseTool): """Tool that lets an agent search memory for one or more queries at once.""" @@ -32,7 +36,7 @@ class RecallMemoryTool(BaseTool): def _run( self, - queries: list[str] | str, + queries: list[str] | str | None = None, **kwargs: Any, ) -> str: """Search memory for relevant information. @@ -43,9 +47,20 @@ def _run( Returns: Formatted string of matching memories, or a message if none found. """ + # Handle None or empty input + if not queries: + return "Error: Please provide search queries. Example: search_memory(queries=['AI trends'])" + + # Handle string input if isinstance(queries, str): queries = [queries] + # Filter out empty strings + queries = [q for q in queries if q and q.strip()] + + if not queries: + return "Error: Please provide non-empty search queries." + all_lines: list[str] = [] seen_ids: set[str] = set() for query in queries: @@ -63,14 +78,18 @@ def _run( class RememberSchema(BaseModel): """Schema for the remember tool.""" - contents: list[str] = Field( - ..., + contents: list[str] | None = Field( + default=None, description=( - "One or more facts, decisions, or observations to remember. " + "REQUIRED: A list of strings to save to memory. " + "Examples: ['User prefers dark mode'], ['Project deadline is March 15', 'Budget is $50k']. " "Pass a single item or multiple items at once." 
), + min_length=1, ) + model_config = {"extra": "forbid"} + class RememberTool(BaseTool): """Tool that lets an agent save one or more items to memory at once.""" @@ -80,7 +99,7 @@ class RememberTool(BaseTool): args_schema: type[BaseModel] = RememberSchema memory: Any = Field(exclude=True) - def _run(self, contents: list[str] | str, **kwargs: Any) -> str: + def _run(self, contents: list[str] | str | None = None, **kwargs: Any) -> str: """Store one or more items in memory. The system infers scope, categories, and importance. Args: @@ -89,8 +108,19 @@ def _run(self, contents: list[str] | str, **kwargs: Any) -> str: Returns: Confirmation with the number of items saved. """ + # Handle None or empty input + if not contents: + return "Error: Please provide content to save. Example: save_to_memory(contents=['fact to remember'])" + if isinstance(contents, str): contents = [contents] + + # Filter out empty strings + contents = [c for c in contents if c and c.strip()] + + if not contents: + return "Error: Please provide non-empty content to save." + if len(contents) == 1: record = self.memory.remember(contents[0]) return ( diff --git a/lib/crewai/src/crewai/translations/en.json b/lib/crewai/src/crewai/translations/en.json index 51a862026f..95f5bcf38d 100644 --- a/lib/crewai/src/crewai/translations/en.json +++ b/lib/crewai/src/crewai/translations/en.json @@ -60,8 +60,8 @@ "description": "See image to understand its content, you can optionally ask a question about the image", "default_action": "Please provide a detailed description of this image, including all visual elements, context, and any notable details you can observe." }, - "recall_memory": "Search through the team's shared memory for relevant information. Pass one or more queries to search for multiple things at once. Use this when you need to find facts, decisions, preferences, or past results that may have been stored previously. 
IMPORTANT: For questions that require counting, summing, or listing items across multiple conversations (e.g. 'how many X', 'total Y', 'list all Z'), you MUST search multiple times with different phrasings to ensure you find ALL relevant items before giving a final count or total. Do not rely on a single search — items may be described differently across conversations.", - "save_to_memory": "Store one or more important facts, decisions, observations, or lessons in memory so they can be recalled later by you or other agents. Pass multiple items at once when you have several things worth remembering." + "recall_memory": "Search through the team's shared memory for relevant information. REQUIRED: You must provide a 'queries' parameter with a list of search strings, for example: {\"queries\": [\"search term\"]} or {\"queries\": [\"term1\", \"term2\"]}. Use this when you need to find facts, decisions, preferences, or past results that may have been stored previously. IMPORTANT: For questions that require counting, summing, or listing items across multiple conversations (e.g. 'how many X', 'total Y', 'list all Z'), you MUST search multiple times with different phrasings to ensure you find ALL relevant items before giving a final count or total. Do not rely on a single search — items may be described differently across conversations.", + "save_to_memory": "Store one or more important facts, decisions, observations, or lessons in memory so they can be recalled later by you or other agents. REQUIRED: You must provide a 'contents' parameter with a list of strings to save, for example: {\"contents\": [\"fact to remember\"]} or {\"contents\": [\"fact1\", \"fact2\"]}. Pass multiple items at once when you have several things worth remembering." }, "memory": { "query_system": "You analyze a query for searching memory.\nGiven the query and available scopes, output:\n1. keywords: Key entities or keywords that can be used to filter by category.\n2. 
suggested_scopes: Which available scopes are most relevant (empty for all).\n3. complexity: 'simple' or 'complex'.\n4. recall_queries: 1-3 short, targeted search phrases distilled from the query. Each should be a concise phrase optimized for semantic vector search. If the query is already short and focused, return it as-is in a single-item list. For long task descriptions, extract the distinct things worth searching for.\n5. time_filter: If the query references a time period (like 'last week', 'yesterday', 'in January'), return an ISO 8601 date string for the earliest relevant date (e.g. '2026-02-01'). Return null if no time constraint is implied.", diff --git a/lib/crewai/src/crewai/utilities/cache_config.py b/lib/crewai/src/crewai/utilities/cache_config.py new file mode 100644 index 0000000000..d13e74383d --- /dev/null +++ b/lib/crewai/src/crewai/utilities/cache_config.py @@ -0,0 +1,66 @@ +"""Shared cache configuration helpers for Valkey/Redis URL parsing.""" + +from __future__ import annotations + +import logging +import os +from typing import Any +from urllib.parse import urlparse + + +_logger = logging.getLogger(__name__) + + +def parse_cache_url() -> dict[str, Any] | None: + """Parse VALKEY_URL or REDIS_URL from environment. + + Priority: VALKEY_URL > REDIS_URL. + + Returns: + Dict with host, port, db, password keys, or None if no URL is set. + """ + url = os.environ.get("VALKEY_URL") or os.environ.get("REDIS_URL") + if not url: + return None + parsed = urlparse(url) + return { + "host": parsed.hostname or "localhost", + "port": parsed.port or 6379, + "db": ( + int(parsed.path.lstrip("/")) if parsed.path and parsed.path != "/" else 0 + ), + "password": parsed.password, + } + + +def get_aiocache_config() -> dict[str, Any]: + """Build an aiocache configuration dict from environment. + + Uses VALKEY_URL or REDIS_URL (both are Redis-wire-compatible) to + configure ``aiocache.RedisCache``. 
Falls back to + ``aiocache.SimpleMemoryCache`` when neither variable is set. + + Returns: + Configuration dict suitable for ``aiocache.caches.set_config()``. + """ + conn = parse_cache_url() + if conn is not None: + return { + "default": { + "cache": "aiocache.RedisCache", + "endpoint": conn["host"], + "port": conn["port"], + "db": conn.get("db", 0), + "password": conn.get("password"), + } + } + return { + "default": { + "cache": "aiocache.SimpleMemoryCache", + } + } + + +def use_valkey_cache() -> bool: + """Return True if VALKEY_URL is set in the environment.""" + return bool(os.environ.get("VALKEY_URL")) diff --git a/lib/crewai/tests/memory/storage/test_valkey_cache.py b/lib/crewai/tests/memory/storage/test_valkey_cache.py new file mode 100644 index 0000000000..923f32ccbf --- /dev/null +++ b/lib/crewai/tests/memory/storage/test_valkey_cache.py @@ -0,0 +1,499 @@ +"""Tests for ValkeyCache implementation.""" + +from __future__ import annotations + +import json +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from crewai.memory.storage.valkey_cache import ValkeyCache + + +@pytest.fixture +def mock_glide_client() -> AsyncMock: + """Create a mock GlideClient for testing.""" + client = AsyncMock() + client.get = AsyncMock() + client.set = AsyncMock() + client.delete = AsyncMock() + client.exists = AsyncMock() + client.close = AsyncMock() + return client + + +@pytest.fixture +def valkey_cache(mock_glide_client: AsyncMock) -> ValkeyCache: + """Create a ValkeyCache instance with mocked client.""" + cache = ValkeyCache(host="localhost", port=6379, db=0) + + # Mock the client creation to return our mock + async def mock_create_client() -> AsyncMock: + cache._client = mock_glide_client + return mock_glide_client + + cache._get_client = mock_create_client # type: ignore[method-assign] + return cache + + +class TestValkeyCacheBasicOperations: + """Tests for basic ValkeyCache operations (get/set/delete/exists).""" + + @pytest.mark.asyncio + async def 
test_set_and_get_string_value( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test setting and getting a string value.""" + # Mock get to return serialized string + mock_glide_client.get.return_value = json.dumps("test_value") + + # Set value + await valkey_cache.set("test_key", "test_value") + + # Verify set was called + mock_glide_client.set.assert_called_once() + call_args = mock_glide_client.set.call_args + assert call_args[0][0] == "test_key" + assert call_args[0][1] == json.dumps("test_value") + + # Get value + result = await valkey_cache.get("test_key") + + # Verify get was called and result is correct + mock_glide_client.get.assert_called_once_with("test_key") + assert result == "test_value" + + @pytest.mark.asyncio + async def test_set_and_get_dict_value( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test setting and getting a dictionary value.""" + test_dict = {"key1": "value1", "key2": 42, "key3": [1, 2, 3]} + mock_glide_client.get.return_value = json.dumps(test_dict) + + # Set value + await valkey_cache.set("dict_key", test_dict) + + # Verify set was called with serialized dict + mock_glide_client.set.assert_called_once() + call_args = mock_glide_client.set.call_args + assert call_args[0][0] == "dict_key" + assert call_args[0][1] == json.dumps(test_dict) + + # Get value + result = await valkey_cache.get("dict_key") + + # Verify result matches original dict + assert result == test_dict + + @pytest.mark.asyncio + async def test_set_and_get_list_value( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test setting and getting a list value.""" + test_list = [1, "two", 3.0, {"nested": "dict"}] + mock_glide_client.get.return_value = json.dumps(test_list) + + await valkey_cache.set("list_key", test_list) + result = await valkey_cache.get("list_key") + + assert result == test_list + + @pytest.mark.asyncio + async def test_get_nonexistent_key_returns_none( + 
self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test getting a non-existent key returns None.""" + mock_glide_client.get.return_value = None + + result = await valkey_cache.get("nonexistent_key") + + assert result is None + mock_glide_client.get.assert_called_once_with("nonexistent_key") + + @pytest.mark.asyncio + async def test_delete_key( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test deleting a key.""" + await valkey_cache.delete("test_key") + + mock_glide_client.delete.assert_called_once_with(["test_key"]) + + @pytest.mark.asyncio + async def test_exists_returns_true_for_existing_key( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test exists returns True for existing key.""" + mock_glide_client.exists.return_value = 1 + + result = await valkey_cache.exists("existing_key") + + assert result is True + mock_glide_client.exists.assert_called_once_with(["existing_key"]) + + @pytest.mark.asyncio + async def test_exists_returns_false_for_nonexistent_key( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test exists returns False for non-existent key.""" + mock_glide_client.exists.return_value = 0 + + result = await valkey_cache.exists("nonexistent_key") + + assert result is False + mock_glide_client.exists.assert_called_once_with(["nonexistent_key"]) + + +class TestValkeyCacheTTL: + """Tests for ValkeyCache TTL functionality.""" + + @pytest.mark.asyncio + async def test_set_with_explicit_ttl( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test setting a value with explicit TTL.""" + await valkey_cache.set("ttl_key", "value", ttl=3600) + + # Verify set was called with expiry + mock_glide_client.set.assert_called_once() + call_args = mock_glide_client.set.call_args + assert call_args[0][0] == "ttl_key" + assert call_args[0][1] == json.dumps("value") + assert "expiry" in call_args[1] + + 
@pytest.mark.asyncio + async def test_set_with_default_ttl( + self, mock_glide_client: AsyncMock + ) -> None: + """Test setting a value with default TTL from constructor.""" + cache = ValkeyCache(host="localhost", port=6379, default_ttl=1800) + + async def mock_create_client() -> AsyncMock: + cache._client = mock_glide_client + return mock_glide_client + + cache._get_client = mock_create_client # type: ignore[method-assign] + + await cache.set("default_ttl_key", "value") + + # Verify set was called with default TTL + mock_glide_client.set.assert_called_once() + call_args = mock_glide_client.set.call_args + assert "expiry" in call_args[1] + + @pytest.mark.asyncio + async def test_set_without_ttl( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test setting a value without TTL (no expiration).""" + await valkey_cache.set("no_ttl_key", "value") + + # Verify set was called without expiry + mock_glide_client.set.assert_called_once() + call_args = mock_glide_client.set.call_args + assert call_args[0][0] == "no_ttl_key" + assert call_args[0][1] == json.dumps("value") + # Should not have expiry parameter + assert "expiry" not in call_args[1] or call_args[1].get("expiry") is None + + @pytest.mark.asyncio + async def test_set_with_zero_ttl_no_expiration( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test setting a value with TTL=0 means no expiration.""" + await valkey_cache.set("zero_ttl_key", "value", ttl=0) + + # Verify set was called without expiry + mock_glide_client.set.assert_called_once() + call_args = mock_glide_client.set.call_args + assert "expiry" not in call_args[1] or call_args[1].get("expiry") is None + + @pytest.mark.asyncio + async def test_explicit_ttl_overrides_default( + self, mock_glide_client: AsyncMock + ) -> None: + """Test explicit TTL overrides default TTL.""" + cache = ValkeyCache(host="localhost", port=6379, default_ttl=1800) + + async def mock_create_client() -> AsyncMock: + 
cache._client = mock_glide_client + return mock_glide_client + + cache._get_client = mock_create_client # type: ignore[method-assign] + + await cache.set("override_key", "value", ttl=7200) + + # Verify set was called with explicit TTL (7200), not default (1800) + mock_glide_client.set.assert_called_once() + call_args = mock_glide_client.set.call_args + assert "expiry" in call_args[1] + + +class TestValkeyCacheJSONSerialization: + """Tests for ValkeyCache JSON serialization edge cases.""" + + @pytest.mark.asyncio + async def test_serialize_none_value( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test serializing None value.""" + mock_glide_client.get.return_value = json.dumps(None) + + await valkey_cache.set("none_key", None) + result = await valkey_cache.get("none_key") + + assert result is None + + @pytest.mark.asyncio + async def test_serialize_boolean_values( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test serializing boolean values.""" + mock_glide_client.get.side_effect = [ + json.dumps(True), + json.dumps(False), + ] + + await valkey_cache.set("true_key", True) + await valkey_cache.set("false_key", False) + + result_true = await valkey_cache.get("true_key") + result_false = await valkey_cache.get("false_key") + + assert result_true is True + assert result_false is False + + @pytest.mark.asyncio + async def test_serialize_numeric_values( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test serializing numeric values (int, float).""" + mock_glide_client.get.side_effect = [ + json.dumps(42), + json.dumps(3.14159), + json.dumps(0), + json.dumps(-100), + ] + + await valkey_cache.set("int_key", 42) + await valkey_cache.set("float_key", 3.14159) + await valkey_cache.set("zero_key", 0) + await valkey_cache.set("negative_key", -100) + + assert await valkey_cache.get("int_key") == 42 + assert await valkey_cache.get("float_key") == 3.14159 + assert await 
valkey_cache.get("zero_key") == 0 + assert await valkey_cache.get("negative_key") == -100 + + @pytest.mark.asyncio + async def test_serialize_empty_collections( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test serializing empty collections.""" + mock_glide_client.get.side_effect = [ + json.dumps([]), + json.dumps({}), + json.dumps(""), + ] + + await valkey_cache.set("empty_list", []) + await valkey_cache.set("empty_dict", {}) + await valkey_cache.set("empty_string", "") + + assert await valkey_cache.get("empty_list") == [] + assert await valkey_cache.get("empty_dict") == {} + assert await valkey_cache.get("empty_string") == "" + + @pytest.mark.asyncio + async def test_serialize_nested_structures( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test serializing deeply nested structures.""" + nested_data = { + "level1": { + "level2": { + "level3": [1, 2, {"level4": "deep"}] + } + }, + "list": [{"a": 1}, {"b": 2}] + } + mock_glide_client.get.return_value = json.dumps(nested_data) + + await valkey_cache.set("nested_key", nested_data) + result = await valkey_cache.get("nested_key") + + assert result == nested_data + + @pytest.mark.asyncio + async def test_deserialize_invalid_json_returns_none( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test deserializing invalid JSON returns None and logs warning.""" + mock_glide_client.get.return_value = "invalid json {{" + + with patch("crewai.memory.storage.valkey_cache._logger") as mock_logger: + result = await valkey_cache.get("invalid_key") + + assert result is None + mock_logger.warning.assert_called_once() + + @pytest.mark.asyncio + async def test_serialize_unicode_strings( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test serializing unicode strings.""" + unicode_data = "Hello 世界 🌍 Привет" + mock_glide_client.get.return_value = json.dumps(unicode_data) + + await 
valkey_cache.set("unicode_key", unicode_data) + result = await valkey_cache.get("unicode_key") + + assert result == unicode_data + + +class TestValkeyCacheConnectionManagement: + """Tests for ValkeyCache connection management.""" + + @pytest.mark.asyncio + async def test_lazy_client_initialization(self) -> None: + """Test client is initialized lazily on first use.""" + cache = ValkeyCache(host="localhost", port=6379) + + # Client should be None initially + assert cache._client is None + + # Mock GlideClient.create + with patch("crewai.memory.storage.valkey_cache.GlideClient") as mock_glide: + mock_client = AsyncMock() + mock_glide.create = AsyncMock(return_value=mock_client) + mock_client.get = AsyncMock(return_value=None) + + # First operation should initialize client + await cache.get("test_key") + + # Client should now be initialized + assert cache._client is not None + mock_glide.create.assert_called_once() + + @pytest.mark.asyncio + async def test_client_reuse_across_operations( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test client is reused across multiple operations.""" + mock_glide_client.get.return_value = json.dumps("value") + mock_glide_client.exists.return_value = 1 + + # Perform multiple operations + await valkey_cache.get("key1") + await valkey_cache.set("key2", "value2") + await valkey_cache.exists("key3") + await valkey_cache.delete("key4") + + # _get_client should return the same client instance + client1 = await valkey_cache._get_client() + client2 = await valkey_cache._get_client() + assert client1 is client2 + + @pytest.mark.asyncio + async def test_close_connection( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test closing the client connection.""" + # Initialize client + await valkey_cache._get_client() + assert valkey_cache._client is not None + + # Close connection + await valkey_cache.close() + + # Verify close was called and client is None + 
mock_glide_client.close.assert_called_once() + assert valkey_cache._client is None + + @pytest.mark.asyncio + async def test_connection_error_raises_runtime_error(self) -> None: + """Test connection error raises RuntimeError with descriptive message.""" + cache = ValkeyCache(host="invalid-host", port=9999) + + with patch("crewai.memory.storage.valkey_cache.GlideClient") as mock_glide: + mock_glide.create = AsyncMock(side_effect=Exception("Connection refused")) + + with pytest.raises(RuntimeError) as exc_info: + await cache._get_client() + + assert "Cannot connect to Valkey" in str(exc_info.value) + assert "invalid-host:9999" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_authentication_with_password(self) -> None: + """Test client initialization with password authentication.""" + cache = ValkeyCache( + host="localhost", + port=6379, + password="secret_password" + ) + + with patch("crewai.memory.storage.valkey_cache.GlideClient") as mock_glide: + mock_client = AsyncMock() + mock_glide.create = AsyncMock(return_value=mock_client) + + await cache._get_client() + + # Verify GlideClient.create was called with credentials + mock_glide.create.assert_called_once() + config = mock_glide.create.call_args[0][0] + assert hasattr(config, "credentials") + + +class TestValkeyCacheEdgeCases: + """Tests for ValkeyCache edge cases and error conditions.""" + + @pytest.mark.asyncio + async def test_set_with_special_characters_in_key( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test setting values with special characters in key.""" + special_keys = [ + "key:with:colons", + "key/with/slashes", + "key-with-dashes", + "key_with_underscores", + "key.with.dots", + ] + + for key in special_keys: + await valkey_cache.set(key, "value") + mock_glide_client.set.assert_called() + + @pytest.mark.asyncio + async def test_large_value_serialization( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test 
serializing large values.""" + large_list = list(range(10000)) + mock_glide_client.get.return_value = json.dumps(large_list) + + await valkey_cache.set("large_key", large_list) + result = await valkey_cache.get("large_key") + + assert result == large_list + + @pytest.mark.asyncio + async def test_concurrent_operations( + self, valkey_cache: ValkeyCache, mock_glide_client: AsyncMock + ) -> None: + """Test concurrent cache operations.""" + import asyncio + + mock_glide_client.get.return_value = json.dumps("value") + + # Perform concurrent operations + tasks = [ + valkey_cache.set(f"key{i}", f"value{i}") + for i in range(10) + ] + await asyncio.gather(*tasks) + + # Verify all operations completed + assert mock_glide_client.set.call_count == 10 diff --git a/lib/crewai/tests/memory/storage/test_valkey_storage.py b/lib/crewai/tests/memory/storage/test_valkey_storage.py new file mode 100644 index 0000000000..bb6d6ee74f --- /dev/null +++ b/lib/crewai/tests/memory/storage/test_valkey_storage.py @@ -0,0 +1,3172 @@ +"""Tests for ValkeyStorage save and retrieval operations.""" + +from __future__ import annotations + +from datetime import datetime +from unittest.mock import AsyncMock, MagicMock, patch +from uuid import uuid4 + +import pytest + +from crewai.memory.storage.valkey_storage import ValkeyStorage +from crewai.memory.types import MemoryRecord + + +@pytest.fixture +def mock_glide_client() -> AsyncMock: + """Create a mock GlideClient for testing.""" + client = AsyncMock() + client.custom_command = AsyncMock() + client.zadd = AsyncMock() + client.sadd = AsyncMock() + client.hgetall = AsyncMock(return_value={}) + client.close = AsyncMock() + return client + + +@pytest.fixture +def valkey_storage(mock_glide_client: AsyncMock) -> ValkeyStorage: + """Create a ValkeyStorage instance with mocked client.""" + storage = ValkeyStorage(host="localhost", port=6379, db=0) + + # Mock the client creation to return our mock + async def mock_create_client() -> AsyncMock: + storage._client = 
mock_glide_client + return mock_glide_client + + storage._get_client = mock_create_client # type: ignore[method-assign] + return storage + + +class TestValkeyStorageSave: + """Tests for ValkeyStorage save operation.""" + + @pytest.mark.asyncio + async def test_save_single_record_with_all_fields( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test saving a single record with all fields populated.""" + # Create a record with all fields + record = MemoryRecord( + id="test-id-123", + content="Test memory content", + scope="/agent/task", + categories=["planning", "execution"], + metadata={"agent_id": "agent-1", "priority": "high"}, + importance=0.8, + created_at=datetime(2024, 1, 1, 12, 0, 0), + last_accessed=datetime(2024, 1, 1, 12, 0, 0), + embedding=[0.1, 0.2, 0.3, 0.4], + source="test-source", + private=True, + ) + + # Mock FT.INFO to simulate index exists + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, # FT.INFO response + None, # HSET response + ] + + # Save the record + await valkey_storage.asave([record]) + + # Verify FT.INFO was called to check index + assert mock_glide_client.custom_command.call_count >= 1 + first_call = mock_glide_client.custom_command.call_args_list[0] + assert first_call[0][0] == ["FT.INFO", "memory_index"] + + # Verify HSET was called with correct record data + hset_call = None + for call in mock_glide_client.custom_command.call_args_list: + if call[0][0][0] == "HSET": + hset_call = call + break + + assert hset_call is not None + hset_args = hset_call[0][0] + assert hset_args[0] == "HSET" + assert hset_args[1] == "record:test-id-123" + + # Verify all fields are present in HSET command + hset_dict = {} + for i in range(2, len(hset_args), 2): + if i + 1 < len(hset_args): + hset_dict[hset_args[i]] = hset_args[i + 1] + + assert hset_dict["id"] == "test-id-123" + assert hset_dict["content"] == "Test memory content" + assert hset_dict["scope"] == "/agent/task" + assert 
hset_dict["source"] == "test-source" + assert hset_dict["private"] == "true" + assert hset_dict["importance"] == "0.8" + assert "embedding" in hset_dict + assert isinstance(hset_dict["embedding"], bytes) + + # Verify scope index was updated + mock_glide_client.zadd.assert_called_once() + zadd_call = mock_glide_client.zadd.call_args + assert zadd_call[0][0] == "scope:/agent/task" + assert "test-id-123" in zadd_call[0][1] + + # Verify category indexes were updated + assert mock_glide_client.sadd.call_count >= 2 + sadd_calls = [call[0] for call in mock_glide_client.sadd.call_args_list] + category_calls = [call for call in sadd_calls if call[0].startswith("category:")] + assert len(category_calls) == 2 + assert any("category:planning" in str(call) for call in category_calls) + assert any("category:execution" in str(call) for call in category_calls) + + # Verify metadata indexes were updated + metadata_calls = [call for call in sadd_calls if call[0].startswith("metadata:")] + assert len(metadata_calls) == 2 + assert any("metadata:agent_id:agent-1" in str(call) for call in metadata_calls) + assert any("metadata:priority:high" in str(call) for call in metadata_calls) + + @pytest.mark.asyncio + async def test_save_multiple_records_in_batch( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test saving multiple records in a single batch.""" + records = [ + MemoryRecord( + id=f"record-{i}", + content=f"Content {i}", + scope="/test", + embedding=[0.1 * i, 0.2 * i, 0.3 * i, 0.4 * i], + ) + for i in range(3) + ] + + # Mock FT.INFO to simulate index exists + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, # FT.INFO response + ] + [None] * 10 # HSET responses + + await valkey_storage.asave(records) + + # Verify HSET was called for each record + hset_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "HSET" + ] + assert len(hset_calls) == 3 + + # Verify each 
record was stored + for i in range(3): + record_key = f"record:record-{i}" + assert any(record_key in str(call) for call in hset_calls) + + # Verify scope index was updated for all records + assert mock_glide_client.zadd.call_count == 3 + + @pytest.mark.asyncio + async def test_save_record_with_empty_categories_and_metadata( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test saving a record with empty categories and metadata.""" + record = MemoryRecord( + id="empty-fields-record", + content="Content with no categories or metadata", + scope="/test", + categories=[], + metadata={}, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO to simulate index exists + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, # FT.INFO response + None, # HSET response + ] + + await valkey_storage.asave([record]) + + # Verify record was saved + hset_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "HSET" + ] + assert len(hset_calls) == 1 + + # Verify no category or metadata index updates + sadd_calls = mock_glide_client.sadd.call_args_list + # Should have no calls since categories and metadata are empty + assert len(sadd_calls) == 0 + + @pytest.mark.asyncio + async def test_save_record_without_embedding( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test saving a record without an embedding.""" + record = MemoryRecord( + id="no-embedding-record", + content="Content without embedding", + scope="/test", + embedding=None, + ) + + # Mock FT.INFO to simulate index exists + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, # FT.INFO response + None, # HSET response + ] + + await valkey_storage.asave([record]) + + # Verify record was saved + hset_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "HSET" + ] + assert len(hset_calls) == 1 + + # 
Verify embedding field is empty bytes + hset_args = hset_calls[0][0][0] + hset_dict = {} + for i in range(2, len(hset_args), 2): + if i + 1 < len(hset_args): + hset_dict[hset_args[i]] = hset_args[i + 1] + + assert "embedding" in hset_dict + assert hset_dict["embedding"] == b"" + + @pytest.mark.asyncio + async def test_save_record_with_none_source( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test saving a record with None source.""" + record = MemoryRecord( + id="none-source-record", + content="Content with None source", + scope="/test", + source=None, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO to simulate index exists + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, # FT.INFO response + None, # HSET response + ] + + await valkey_storage.asave([record]) + + # Verify record was saved + hset_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "HSET" + ] + assert len(hset_calls) == 1 + + # Verify source field is empty string + hset_args = hset_calls[0][0][0] + hset_dict = {} + for i in range(2, len(hset_args), 2): + if i + 1 < len(hset_args): + hset_dict[hset_args[i]] = hset_args[i + 1] + + assert hset_dict["source"] == "" + + @pytest.mark.asyncio + async def test_save_empty_list_does_nothing( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that saving an empty list does nothing.""" + await valkey_storage.asave([]) + + # Verify no operations were performed + mock_glide_client.custom_command.assert_not_called() + mock_glide_client.zadd.assert_not_called() + mock_glide_client.sadd.assert_not_called() + + @pytest.mark.asyncio + async def test_save_creates_vector_index_if_not_exists( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that save creates vector index if it doesn't exist.""" + record = MemoryRecord( + id="test-record", + content="Test 
content", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO to fail (index doesn't exist), then FT.CREATE succeeds + mock_glide_client.custom_command.side_effect = [ + Exception("Unknown Index name"), # FT.INFO fails + None, # FT.CREATE succeeds + None, # HSET response + ] + + await valkey_storage.asave([record]) + + # Verify FT.CREATE was called + ft_create_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.CREATE" + ] + assert len(ft_create_calls) == 1 + + # Verify FT.CREATE command structure + create_cmd = ft_create_calls[0][0][0] + assert create_cmd[0] == "FT.CREATE" + assert create_cmd[1] == "memory_index" + assert "SCHEMA" in create_cmd + assert "embedding" in create_cmd + assert "VECTOR" in create_cmd + + @pytest.mark.asyncio + async def test_save_error_handling_for_serialization_failure( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test error handling when serialization fails.""" + # Create a record with a field that will cause serialization to fail + record = MemoryRecord( + id="bad-record", + content="Test content", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock _record_to_dict to raise an error + with patch.object( + valkey_storage, + "_record_to_dict", + side_effect=ValueError("Serialization failed"), + ): + with pytest.raises(ValueError, match="Serialization failed"): + await valkey_storage.asave([record]) + + def test_save_sync_wrapper( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that sync save wrapper calls async implementation.""" + record = MemoryRecord( + id="sync-test-record", + content="Test content", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO to simulate index exists + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, # FT.INFO response + None, # HSET response + ] + + # Call sync save + 
valkey_storage.save([record]) + + # Verify async operations were called + assert mock_glide_client.custom_command.call_count >= 1 + + @pytest.mark.asyncio + async def test_save_with_special_characters_in_metadata( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test saving a record with special characters in metadata values.""" + record = MemoryRecord( + id="special-chars-record", + content="Test content", + scope="/test", + metadata={ + "key:with:colons": "value:with:colons", + "key with spaces": "value with spaces", + "key/with/slashes": "value/with/slashes", + }, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO to simulate index exists + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, # FT.INFO response + None, # HSET response + ] + + await valkey_storage.asave([record]) + + # Verify metadata indexes were created with special characters + sadd_calls = mock_glide_client.sadd.call_args_list + metadata_calls = [call[0][0] for call in sadd_calls if call[0][0].startswith("metadata:")] + + assert len(metadata_calls) == 3 + assert any("key:with:colons:value:with:colons" in call for call in metadata_calls) + assert any("key with spaces:value with spaces" in call for call in metadata_calls) + assert any("key/with/slashes:value/with/slashes" in call for call in metadata_calls) + + @pytest.mark.asyncio + async def test_save_with_numeric_metadata_values( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test saving a record with numeric metadata values.""" + record = MemoryRecord( + id="numeric-metadata-record", + content="Test content", + scope="/test", + metadata={ + "count": 42, + "score": 3.14, + "is_active": True, + }, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO to simulate index exists + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, # FT.INFO response + None, # HSET response + ] + + await 
valkey_storage.asave([record]) + + # Verify metadata indexes were created with string-converted values + sadd_calls = mock_glide_client.sadd.call_args_list + metadata_calls = [call[0][0] for call in sadd_calls if call[0][0].startswith("metadata:")] + + assert len(metadata_calls) == 3 + assert any("metadata:count:42" in call for call in metadata_calls) + assert any("metadata:score:3.14" in call for call in metadata_calls) + assert any("metadata:is_active:True" in call for call in metadata_calls) + + @pytest.mark.asyncio + async def test_save_preserves_datetime_precision( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that datetime fields are serialized with proper precision.""" + created_at = datetime(2024, 1, 15, 10, 30, 45, 123456) + last_accessed = datetime(2024, 1, 15, 11, 45, 30, 654321) + + record = MemoryRecord( + id="datetime-precision-record", + content="Test content", + scope="/test", + created_at=created_at, + last_accessed=last_accessed, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO to simulate index exists + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, # FT.INFO response + None, # HSET response + ] + + await valkey_storage.asave([record]) + + # Verify datetime fields are in ISO format + hset_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "HSET" + ] + assert len(hset_calls) == 1 + + hset_args = hset_calls[0][0][0] + hset_dict = {} + for i in range(2, len(hset_args), 2): + if i + 1 < len(hset_args): + hset_dict[hset_args[i]] = hset_args[i + 1] + + assert hset_dict["created_at"] == created_at.isoformat() + assert hset_dict["last_accessed"] == last_accessed.isoformat() + + + +class TestValkeyStorageGetRecord: + """Tests for ValkeyStorage get_record operation.""" + + @pytest.mark.asyncio + async def test_retrieve_existing_record_with_all_fields( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) 
-> None: + """Test retrieving an existing record with all fields populated.""" + # Mock HGETALL to return a complete record + mock_glide_client.hgetall.return_value = { + "id": "test-record-123", + "content": "Test memory content", + "scope": "/agent/task", + "categories": '["planning", "execution"]', + "metadata": '{"agent_id": "agent-1", "priority": "high"}', + "importance": "0.8", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T13:00:00", + "embedding": valkey_storage._embedding_to_bytes([0.1, 0.2, 0.3, 0.4]), + "source": "test-source", + "private": "true", + } + + # Retrieve the record + record = await valkey_storage._aget_record("test-record-123") + + # Verify HGETALL was called with correct key + mock_glide_client.hgetall.assert_called_once_with("record:test-record-123") + + # Verify all fields are correctly deserialized + assert record is not None + assert record.id == "test-record-123" + assert record.content == "Test memory content" + assert record.scope == "/agent/task" + assert record.categories == ["planning", "execution"] + assert record.metadata == {"agent_id": "agent-1", "priority": "high"} + assert record.importance == 0.8 + assert record.created_at == datetime(2024, 1, 1, 12, 0, 0) + assert record.last_accessed == datetime(2024, 1, 1, 13, 0, 0) + # Check embedding with approximate comparison (float32 precision) + assert record.embedding is not None + assert len(record.embedding) == 4 + for i, expected in enumerate([0.1, 0.2, 0.3, 0.4]): + assert abs(record.embedding[i] - expected) < 1e-6 + assert record.source == "test-source" + assert record.private is True + + @pytest.mark.asyncio + async def test_retrieve_non_existent_record_returns_none( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test retrieving a non-existent record returns None.""" + # Mock HGETALL to return empty dict (record doesn't exist) + mock_glide_client.hgetall.return_value = {} + + # Retrieve non-existent record + 
record = await valkey_storage._aget_record("non-existent-id") + + # Verify HGETALL was called + mock_glide_client.hgetall.assert_called_once_with("record:non-existent-id") + + # Verify None is returned + assert record is None + + @pytest.mark.asyncio + async def test_retrieve_record_with_empty_embedding( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test retrieving a record with empty embedding.""" + # Mock HGETALL to return record with empty embedding + mock_glide_client.hgetall.return_value = { + "id": "no-embedding-record", + "content": "Content without embedding", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", # Empty bytes + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("no-embedding-record") + + # Verify record is retrieved with None embedding + assert record is not None + assert record.id == "no-embedding-record" + assert record.embedding is None + + @pytest.mark.asyncio + async def test_retrieve_record_with_none_source( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test retrieving a record with None source.""" + # Mock HGETALL to return record with empty source + mock_glide_client.hgetall.return_value = { + "id": "no-source-record", + "content": "Content without source", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", # Empty string + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("no-source-record") + + # Verify record is retrieved with None source + assert record is not None + assert record.source is None + + @pytest.mark.asyncio + async def 
test_retrieve_record_with_false_private_flag( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test retrieving a record with private=false.""" + # Mock HGETALL to return record with private=false + mock_glide_client.hgetall.return_value = { + "id": "public-record", + "content": "Public content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("public-record") + + # Verify private flag is False + assert record is not None + assert record.private is False + + @pytest.mark.asyncio + async def test_retrieve_record_with_empty_categories_and_metadata( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test retrieving a record with empty categories and metadata.""" + # Mock HGETALL to return record with empty lists/dicts + mock_glide_client.hgetall.return_value = { + "id": "minimal-record", + "content": "Minimal content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("minimal-record") + + # Verify empty collections are preserved + assert record is not None + assert record.categories == [] + assert record.metadata == {} + + @pytest.mark.asyncio + async def test_deserialization_of_datetime_fields( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deserialization of datetime fields with microseconds.""" + # Mock HGETALL with datetime including microseconds + mock_glide_client.hgetall.return_value = { + "id": "datetime-record", + "content": "Test content", + "scope": 
"/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-15T10:30:45.123456", + "last_accessed": "2024-01-15T11:45:30.654321", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("datetime-record") + + # Verify datetime fields are correctly parsed + assert record is not None + assert record.created_at == datetime(2024, 1, 15, 10, 30, 45, 123456) + assert record.last_accessed == datetime(2024, 1, 15, 11, 45, 30, 654321) + + @pytest.mark.asyncio + async def test_deserialization_of_float_importance( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deserialization of float importance value.""" + # Mock HGETALL with various float formats + mock_glide_client.hgetall.return_value = { + "id": "float-record", + "content": "Test content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.123456789", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("float-record") + + # Verify float is correctly parsed + assert record is not None + assert abs(record.importance - 0.123456789) < 1e-9 + + @pytest.mark.asyncio + async def test_deserialization_of_json_categories( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deserialization of JSON categories array.""" + # Mock HGETALL with multiple categories + mock_glide_client.hgetall.return_value = { + "id": "categories-record", + "content": "Test content", + "scope": "/test", + "categories": '["planning", "execution", "review", "analysis"]', + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the 
record + record = await valkey_storage._aget_record("categories-record") + + # Verify categories are correctly parsed + assert record is not None + assert record.categories == ["planning", "execution", "review", "analysis"] + + @pytest.mark.asyncio + async def test_deserialization_of_json_metadata( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deserialization of JSON metadata object.""" + # Mock HGETALL with complex metadata + mock_glide_client.hgetall.return_value = { + "id": "metadata-record", + "content": "Test content", + "scope": "/test", + "categories": "[]", + "metadata": '{"agent_id": "agent-1", "count": 42, "score": 3.14, "active": true}', + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("metadata-record") + + # Verify metadata is correctly parsed + assert record is not None + assert record.metadata == { + "agent_id": "agent-1", + "count": 42, + "score": 3.14, + "active": True, + } + + @pytest.mark.asyncio + async def test_deserialization_of_binary_embedding( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deserialization of binary embedding vector.""" + # Create a test embedding + test_embedding = [0.1, 0.2, 0.3, 0.4, 0.5] + embedding_bytes = valkey_storage._embedding_to_bytes(test_embedding) + + # Mock HGETALL with binary embedding + mock_glide_client.hgetall.return_value = { + "id": "embedding-record", + "content": "Test content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": embedding_bytes, + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("embedding-record") + + # Verify embedding is 
correctly deserialized + assert record is not None + assert record.embedding is not None + assert len(record.embedding) == 5 + for i, val in enumerate(test_embedding): + assert abs(record.embedding[i] - val) < 1e-6 + + @pytest.mark.asyncio + async def test_handling_of_malformed_json_categories( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test handling of non-JSON categories uses TAG fallback.""" + # Mock HGETALL with non-JSON categories (treated as TAG format) + mock_glide_client.hgetall.return_value = { + "id": "malformed-categories", + "content": "Test content", + "scope": "/test", + "categories": "not valid json [", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("malformed-categories") + + # TAG fallback: comma-split produces the raw string as a single category + assert record is not None + assert record.id == "malformed-categories" + assert record.categories == ["not valid json ["] + mock_glide_client.hgetall.assert_called_once() + + @pytest.mark.asyncio + async def test_handling_of_malformed_json_metadata( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test handling of malformed JSON in metadata field.""" + # Mock HGETALL with invalid JSON + mock_glide_client.hgetall.return_value = { + "id": "malformed-metadata", + "content": "Test content", + "scope": "/test", + "categories": "[]", + "metadata": "{invalid json}", + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("malformed-metadata") + + # Verify None is returned and error is logged + assert record is None + + @pytest.mark.asyncio + async 
def test_handling_of_invalid_datetime_format( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test handling of invalid datetime format.""" + # Mock HGETALL with invalid datetime + mock_glide_client.hgetall.return_value = { + "id": "invalid-datetime", + "content": "Test content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "not a valid datetime", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("invalid-datetime") + + # Verify None is returned and error is logged + assert record is None + + @pytest.mark.asyncio + async def test_handling_of_invalid_importance_value( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test handling of invalid importance value.""" + # Mock HGETALL with non-numeric importance + mock_glide_client.hgetall.return_value = { + "id": "invalid-importance", + "content": "Test content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "not a number", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("invalid-importance") + + # Verify None is returned and error is logged + assert record is None + + @pytest.mark.asyncio + async def test_handling_of_missing_required_fields( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test handling of missing required fields.""" + # Mock HGETALL with missing fields + mock_glide_client.hgetall.return_value = { + "id": "incomplete-record", + "content": "Test content", + # Missing scope, categories, metadata, etc. 
+ } + + # Retrieve the record + record = await valkey_storage._aget_record("incomplete-record") + + # Verify None is returned and error is logged + assert record is None + + @pytest.mark.asyncio + async def test_handling_of_connection_error( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test handling of connection error during retrieval.""" + # Mock HGETALL to raise connection error + mock_glide_client.hgetall.side_effect = Exception("Connection failed") + + # Retrieve the record + record = await valkey_storage._aget_record("test-record") + + # Verify None is returned and error is logged + assert record is None + + def test_get_record_sync_wrapper( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that sync get_record wrapper calls async implementation.""" + # Mock HGETALL to return a record + mock_glide_client.hgetall.return_value = { + "id": "sync-test-record", + "content": "Test content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Call sync get_record + record = valkey_storage.get_record("sync-test-record") + + # Verify async operation was called + mock_glide_client.hgetall.assert_called_once_with("record:sync-test-record") + assert record is not None + assert record.id == "sync-test-record" + + @pytest.mark.asyncio + async def test_retrieve_record_with_special_characters_in_content( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test retrieving a record with special characters in content.""" + # Mock HGETALL with special characters + mock_glide_client.hgetall.return_value = { + "id": "special-chars-record", + "content": "Content with special chars: \n\t\"quotes\" 'apostrophes' & symbols", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + 
"importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("special-chars-record") + + # Verify special characters are preserved + assert record is not None + assert "\n" in record.content + assert "\t" in record.content + assert '"quotes"' in record.content + assert "'apostrophes'" in record.content + + @pytest.mark.asyncio + async def test_retrieve_record_with_unicode_content( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test retrieving a record with unicode content.""" + # Mock HGETALL with unicode characters + mock_glide_client.hgetall.return_value = { + "id": "unicode-record", + "content": "Unicode content: 你好 مرحبا שלום 🚀 ñ é ü", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T12:00:00", + "last_accessed": "2024-01-01T12:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Retrieve the record + record = await valkey_storage._aget_record("unicode-record") + + # Verify unicode is preserved + assert record is not None + assert "你好" in record.content + assert "🚀" in record.content + + + +class TestValkeyStorageUpdate: + """Tests for ValkeyStorage update operation.""" + + @pytest.mark.asyncio + async def test_update_existing_record_preserves_created_at( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test updating an existing record preserves created_at timestamp.""" + original_created_at = datetime(2024, 1, 1, 10, 0, 0) + original_last_accessed = datetime(2024, 1, 1, 11, 0, 0) + + # Mock HGETALL to return existing record + mock_glide_client.hgetall.return_value = { + "id": "existing-record", + "content": "Original content", + "scope": "/original/scope", + "categories": '["old-category"]', + "metadata": '{"old_key": 
"old_value"}', + "importance": "0.5", + "created_at": original_created_at.isoformat(), + "last_accessed": original_last_accessed.isoformat(), + "embedding": b"", + "source": "", + "private": "false", + } + + # Create updated record with different created_at + updated_record = MemoryRecord( + id="existing-record", + content="Updated content", + scope="/updated/scope", + categories=["new-category"], + metadata={"new_key": "new_value"}, + importance=0.8, + created_at=datetime(2024, 2, 1, 10, 0, 0), # Different created_at + last_accessed=datetime(2024, 2, 1, 11, 0, 0), + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Update the record + await valkey_storage._aupdate(updated_record) + + # Verify HGETALL was called to fetch existing record + mock_glide_client.hgetall.assert_called_once_with("record:existing-record") + + # Verify HSET was called with updated data + hset_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "HSET" + ] + assert len(hset_calls) == 1 + + hset_args = hset_calls[0][0][0] + hset_dict = {} + for i in range(2, len(hset_args), 2): + if i + 1 < len(hset_args): + hset_dict[hset_args[i]] = hset_args[i + 1] + + # Verify created_at was preserved from original + assert hset_dict["created_at"] == original_created_at.isoformat() + + # Verify other fields were updated + assert hset_dict["content"] == "Updated content" + assert hset_dict["scope"] == "/updated/scope" + assert hset_dict["importance"] == "0.8" + + # Verify last_accessed was updated to current time (not the one in updated_record) + last_accessed_dt = datetime.fromisoformat(hset_dict["last_accessed"]) + assert last_accessed_dt > original_last_accessed + + @pytest.mark.asyncio + async def test_update_non_existent_record_creates_new_one( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test updating a non-existent record creates a new one.""" + # Mock HGETALL to return empty dict (record doesn't exist) + 
mock_glide_client.hgetall.return_value = {} + + # Create new record + new_record = MemoryRecord( + id="new-record", + content="New content", + scope="/new/scope", + categories=["new-category"], + metadata={"key": "value"}, + importance=0.7, + created_at=datetime(2024, 1, 1, 10, 0, 0), + last_accessed=datetime(2024, 1, 1, 11, 0, 0), + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Update (create) the record + await valkey_storage._aupdate(new_record) + + # Verify HGETALL was called + mock_glide_client.hgetall.assert_called_once_with("record:new-record") + + # Verify HSET was called to create the record + hset_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "HSET" + ] + assert len(hset_calls) == 1 + + # Verify new indexes were created + mock_glide_client.zadd.assert_called_once() + assert mock_glide_client.sadd.call_count == 2 # 1 category + 1 metadata + + @pytest.mark.asyncio + async def test_update_maintains_index_consistency( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that update maintains index consistency.""" + # Mock HGETALL to return existing record + mock_glide_client.hgetall.return_value = { + "id": "indexed-record", + "content": "Original content", + "scope": "/original", + "categories": '["cat1", "cat2"]', + "metadata": '{"key1": "value1", "key2": "value2"}', + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Create updated record with same categories and metadata + updated_record = MemoryRecord( + id="indexed-record", + content="Updated content", + scope="/original", + categories=["cat1", "cat2"], + metadata={"key1": "value1", "key2": "value2"}, + importance=0.8, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Update the record + await valkey_storage._aupdate(updated_record) + + # Verify old indexes were removed + 
mock_glide_client.zrem.assert_called_once_with("scope:/original", ["indexed-record"]) + + # Verify old category indexes were removed + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + category_srem_calls = [call for call in srem_calls if "category:" in str(call)] + assert len(category_srem_calls) == 2 + + # Verify old metadata indexes were removed + metadata_srem_calls = [call for call in srem_calls if "metadata:" in str(call)] + assert len(metadata_srem_calls) == 2 + + # Verify new indexes were added + mock_glide_client.zadd.assert_called_once() + + # Verify new category indexes were added + sadd_calls = [call for call in mock_glide_client.sadd.call_args_list] + category_sadd_calls = [call for call in sadd_calls if "category:" in str(call[0])] + assert len(category_sadd_calls) == 2 + + # Verify new metadata indexes were added + metadata_sadd_calls = [call for call in sadd_calls if "metadata:" in str(call[0])] + assert len(metadata_sadd_calls) == 2 + + @pytest.mark.asyncio + async def test_update_removes_from_old_scope_index( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test updating scope removes record from old scope index.""" + # Mock HGETALL to return existing record with old scope + mock_glide_client.hgetall.return_value = { + "id": "scope-change-record", + "content": "Content", + "scope": "/old/scope", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Create updated record with new scope + updated_record = MemoryRecord( + id="scope-change-record", + content="Content", + scope="/new/scope", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Update the record + await valkey_storage._aupdate(updated_record) + + # Verify removed from old scope index + mock_glide_client.zrem.assert_called_once_with( + "scope:/old/scope", ["scope-change-record"] + 
) + + # Verify added to new scope index + zadd_call = mock_glide_client.zadd.call_args + assert zadd_call[0][0] == "scope:/new/scope" + assert "scope-change-record" in zadd_call[0][1] + + @pytest.mark.asyncio + async def test_update_removes_from_old_category_indexes( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test updating categories removes record from old category indexes.""" + # Mock HGETALL to return existing record with old categories + mock_glide_client.hgetall.return_value = { + "id": "category-change-record", + "content": "Content", + "scope": "/test", + "categories": '["old-cat1", "old-cat2", "shared-cat"]', + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Create updated record with new categories (one shared, two new) + updated_record = MemoryRecord( + id="category-change-record", + content="Content", + scope="/test", + categories=["new-cat1", "new-cat2", "shared-cat"], + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Update the record + await valkey_storage._aupdate(updated_record) + + # Verify removed from all old category indexes + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + category_srem_calls = [call for call in srem_calls if "category:" in str(call)] + assert len(category_srem_calls) == 3 + + # Verify removed from old-cat1, old-cat2, and shared-cat + srem_keys = [call[0][0] for call in category_srem_calls] + assert "category:old-cat1" in srem_keys + assert "category:old-cat2" in srem_keys + assert "category:shared-cat" in srem_keys + + # Verify added to all new category indexes + sadd_calls = [call for call in mock_glide_client.sadd.call_args_list] + category_sadd_calls = [call for call in sadd_calls if "category:" in str(call[0])] + assert len(category_sadd_calls) == 3 + + # Verify added to new-cat1, new-cat2, and shared-cat + sadd_keys = 
[call[0][0] for call in category_sadd_calls] + assert "category:new-cat1" in sadd_keys + assert "category:new-cat2" in sadd_keys + assert "category:shared-cat" in sadd_keys + + @pytest.mark.asyncio + async def test_update_removes_from_old_metadata_indexes( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test updating metadata removes record from old metadata indexes.""" + # Mock HGETALL to return existing record with old metadata + mock_glide_client.hgetall.return_value = { + "id": "metadata-change-record", + "content": "Content", + "scope": "/test", + "categories": "[]", + "metadata": '{"old_key1": "old_value1", "old_key2": "old_value2", "shared_key": "old_value"}', + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Create updated record with new metadata + updated_record = MemoryRecord( + id="metadata-change-record", + content="Content", + scope="/test", + metadata={"new_key1": "new_value1", "new_key2": "new_value2", "shared_key": "new_value"}, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Update the record + await valkey_storage._aupdate(updated_record) + + # Verify removed from all old metadata indexes + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + metadata_srem_calls = [call for call in srem_calls if "metadata:" in str(call)] + assert len(metadata_srem_calls) == 3 + + # Verify removed from old metadata keys + srem_keys = [call[0][0] for call in metadata_srem_calls] + assert "metadata:old_key1:old_value1" in srem_keys + assert "metadata:old_key2:old_value2" in srem_keys + assert "metadata:shared_key:old_value" in srem_keys + + # Verify added to all new metadata indexes + sadd_calls = [call for call in mock_glide_client.sadd.call_args_list] + metadata_sadd_calls = [call for call in sadd_calls if "metadata:" in str(call[0])] + assert len(metadata_sadd_calls) == 3 + + # Verify added 
to new metadata keys + sadd_keys = [call[0][0] for call in metadata_sadd_calls] + assert "metadata:new_key1:new_value1" in sadd_keys + assert "metadata:new_key2:new_value2" in sadd_keys + assert "metadata:shared_key:new_value" in sadd_keys + + @pytest.mark.asyncio + async def test_update_with_empty_categories_removes_all_old_categories( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test updating to empty categories removes all old category indexes.""" + # Mock HGETALL to return existing record with categories + mock_glide_client.hgetall.return_value = { + "id": "remove-categories-record", + "content": "Content", + "scope": "/test", + "categories": '["cat1", "cat2", "cat3"]', + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Create updated record with empty categories + updated_record = MemoryRecord( + id="remove-categories-record", + content="Content", + scope="/test", + categories=[], + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Update the record + await valkey_storage._aupdate(updated_record) + + # Verify removed from all old category indexes + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + category_srem_calls = [call for call in srem_calls if "category:" in str(call)] + assert len(category_srem_calls) == 3 + + # Verify no new category indexes were added + sadd_calls = [call for call in mock_glide_client.sadd.call_args_list] + category_sadd_calls = [call for call in sadd_calls if "category:" in str(call[0])] + assert len(category_sadd_calls) == 0 + + @pytest.mark.asyncio + async def test_update_with_empty_metadata_removes_all_old_metadata( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test updating to empty metadata removes all old metadata indexes.""" + # Mock HGETALL to return existing record with metadata + 
mock_glide_client.hgetall.return_value = { + "id": "remove-metadata-record", + "content": "Content", + "scope": "/test", + "categories": "[]", + "metadata": '{"key1": "value1", "key2": "value2"}', + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Create updated record with empty metadata + updated_record = MemoryRecord( + id="remove-metadata-record", + content="Content", + scope="/test", + metadata={}, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Update the record + await valkey_storage._aupdate(updated_record) + + # Verify removed from all old metadata indexes + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + metadata_srem_calls = [call for call in srem_calls if "metadata:" in str(call)] + assert len(metadata_srem_calls) == 2 + + # Verify no new metadata indexes were added + sadd_calls = [call for call in mock_glide_client.sadd.call_args_list] + metadata_sadd_calls = [call for call in sadd_calls if "metadata:" in str(call[0])] + assert len(metadata_sadd_calls) == 0 + + @pytest.mark.asyncio + async def test_update_handles_malformed_old_data_gracefully( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test update handles malformed old data gracefully.""" + # Mock HGETALL to return record with malformed JSON + mock_glide_client.hgetall.return_value = { + "id": "malformed-record", + "content": "Content", + "scope": "/test", + "categories": "not valid json", + "metadata": "{invalid json}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Create updated record + updated_record = MemoryRecord( + id="malformed-record", + content="Updated content", + scope="/test", + categories=["new-cat"], + metadata={"new_key": "new_value"}, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # 
Update should not raise an error + await valkey_storage._aupdate(updated_record) + + # Verify HSET was called (update proceeded despite malformed old data) + hset_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "HSET" + ] + assert len(hset_calls) == 1 + + # Verify new indexes were added + mock_glide_client.zadd.assert_called_once() + assert mock_glide_client.sadd.call_count >= 2 + + @pytest.mark.asyncio + async def test_update_handles_missing_created_at_gracefully( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test update handles missing created_at in old record gracefully.""" + # Mock HGETALL to return record without created_at + mock_glide_client.hgetall.return_value = { + "id": "no-created-at-record", + "content": "Content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + # Missing created_at + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Create updated record with created_at + updated_record = MemoryRecord( + id="no-created-at-record", + content="Updated content", + scope="/test", + created_at=datetime(2024, 2, 1, 10, 0, 0), + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Update should not raise an error + await valkey_storage._aupdate(updated_record) + + # Verify HSET was called + hset_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "HSET" + ] + assert len(hset_calls) == 1 + + # Verify created_at from updated_record was used (since old one was missing) + hset_args = hset_calls[0][0][0] + hset_dict = {} + for i in range(2, len(hset_args), 2): + if i + 1 < len(hset_args): + hset_dict[hset_args[i]] = hset_args[i + 1] + + # Should use the created_at from updated_record since old one was missing + assert "created_at" in hset_dict + + @pytest.mark.asyncio + async def test_update_with_numeric_metadata_values( + self, 
valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test update with numeric metadata values converts to strings.""" + # Mock HGETALL to return existing record + mock_glide_client.hgetall.return_value = { + "id": "numeric-metadata-record", + "content": "Content", + "scope": "/test", + "categories": "[]", + "metadata": '{"count": 10, "score": 5.5}', + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Create updated record with different numeric metadata + updated_record = MemoryRecord( + id="numeric-metadata-record", + content="Content", + scope="/test", + metadata={"count": 20, "score": 7.5, "active": True}, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Update the record + await valkey_storage._aupdate(updated_record) + + # Verify removed from old metadata indexes with string-converted values + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + metadata_srem_calls = [call for call in srem_calls if "metadata:" in str(call)] + srem_keys = [call[0][0] for call in metadata_srem_calls] + assert "metadata:count:10" in srem_keys + assert "metadata:score:5.5" in srem_keys + + # Verify added to new metadata indexes with string-converted values + sadd_calls = [call for call in mock_glide_client.sadd.call_args_list] + metadata_sadd_calls = [call for call in sadd_calls if "metadata:" in str(call[0])] + sadd_keys = [call[0][0] for call in metadata_sadd_calls] + assert "metadata:count:20" in sadd_keys + assert "metadata:score:7.5" in sadd_keys + assert "metadata:active:True" in sadd_keys + + def test_update_sync_wrapper( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that sync update wrapper calls async implementation.""" + # Mock HGETALL to return empty dict (new record) + mock_glide_client.hgetall.return_value = {} + + # Create record + record = MemoryRecord( + 
id="sync-update-record", + content="Content", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Call sync update + valkey_storage.update(record) + + # Verify async operations were called + mock_glide_client.hgetall.assert_called_once_with("record:sync-update-record") + + @pytest.mark.asyncio + async def test_update_preserves_embedding( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that update preserves embedding correctly.""" + # Mock HGETALL to return existing record + mock_glide_client.hgetall.return_value = { + "id": "embedding-update-record", + "content": "Original content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": valkey_storage._embedding_to_bytes([0.1, 0.2, 0.3, 0.4]), + "source": "", + "private": "false", + } + + # Create updated record with new embedding + new_embedding = [0.5, 0.6, 0.7, 0.8] + updated_record = MemoryRecord( + id="embedding-update-record", + content="Updated content", + scope="/test", + embedding=new_embedding, + ) + + # Update the record + await valkey_storage._aupdate(updated_record) + + # Verify HSET was called with new embedding + hset_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "HSET" + ] + assert len(hset_calls) == 1 + + hset_args = hset_calls[0][0][0] + hset_dict = {} + for i in range(2, len(hset_args), 2): + if i + 1 < len(hset_args): + hset_dict[hset_args[i]] = hset_args[i + 1] + + # Verify embedding was updated + assert "embedding" in hset_dict + # Deserialize and check values + deserialized_embedding = valkey_storage._bytes_to_embedding(hset_dict["embedding"]) + assert len(deserialized_embedding) == 4 + for i, val in enumerate(new_embedding): + assert abs(deserialized_embedding[i] - val) < 1e-6 + + +class TestValkeyStorageDelete: + """Tests for ValkeyStorage delete operation.""" + + 
@pytest.mark.asyncio + async def test_delete_by_record_ids( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deleting records by specific record IDs.""" + # Mock record data for deletion + mock_glide_client.hgetall.side_effect = [ + { + "id": "record-1", + "content": "Content 1", + "scope": "/test", + "categories": '["cat1", "cat2"]', + "metadata": '{"key1": "value1"}', + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + { + "id": "record-2", + "content": "Content 2", + "scope": "/test", + "categories": '["cat1"]', + "metadata": '{"key1": "value2"}', + "importance": "0.6", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + ] + + # Delete by record IDs + count = await valkey_storage.adelete(record_ids=["record-1", "record-2"]) + + # Verify correct count returned + assert count == 2 + + # Verify records were deleted + delete_calls = [call for call in mock_glide_client.delete.call_args_list] + assert len(delete_calls) == 2 + + # Verify records were removed from scope indexes + zrem_calls = [call for call in mock_glide_client.zrem.call_args_list] + assert len(zrem_calls) == 2 + assert any("scope:/test" in str(call) for call in zrem_calls) + + # Verify records were removed from category indexes + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + category_srem_calls = [call for call in srem_calls if "category:" in str(call)] + assert len(category_srem_calls) >= 2 # At least cat1 and cat2 + + # Verify records were removed from metadata indexes + metadata_srem_calls = [call for call in srem_calls if "metadata:" in str(call)] + assert len(metadata_srem_calls) >= 2 # At least key1:value1 and key1:value2 + + @pytest.mark.asyncio + async def test_delete_by_scope_prefix( + self, valkey_storage: 
ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deleting records by scope prefix.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", # cursor (as bytes) + [b"scope:/agent/task1", b"scope:/agent/task2", b"scope:/other"], + ) + + # Mock custom_command for ZRANGE calls (used by _find_records_by_scope) + mock_glide_client.custom_command.side_effect = [ + ["record-1", "record-2"], # ZRANGE scope:/agent/task1 + ["record-3"], # ZRANGE scope:/agent/task2 + [], # ZRANGE scope:/other (not matched by prefix) + ] + + # Mock record data (for _fetch_records_for_deletion) + mock_glide_client.hgetall.side_effect = [ + { + b"id": b"record-1", + b"content": b"Content 1", + b"scope": b"/agent/task1", + b"categories": b"[]", + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + b"last_accessed": b"2024-01-01T11:00:00", + b"embedding": b"", + b"source": b"", + b"private": b"false", + }, + + { + b"id": b"record-2", + b"content": b"Content 2", + b"scope": b"/agent/task1", + b"categories": b"[]", + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + b"last_accessed": b"2024-01-01T11:00:00", + b"embedding": b"", + b"source": b"", + b"private": b"false", + }, + { + b"id": b"record-3", + b"content": b"Content 3", + b"scope": b"/agent/task2", + b"categories": b"[]", + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + ] + + # Delete by scope prefix + count = await valkey_storage.adelete(scope_prefix="/agent") + + # Verify correct count returned (3 records in /agent scopes) + assert count == 3 + + # Verify scan was called to find scope keys + mock_glide_client.scan.assert_called() + + # Verify custom_command was called for ZRANGE to get record IDs + assert mock_glide_client.custom_command.call_count >= 2 + + # Verify records 
were deleted + assert mock_glide_client.delete.call_count == 3 + + + @pytest.mark.asyncio + async def test_delete_by_categories( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deleting records by categories.""" + # Mock smembers to return record IDs for categories + mock_glide_client.smembers.side_effect = [ + {"record-1", "record-2", "record-3"}, # category:planning + {"record-2", "record-3", "record-4"}, # category:execution + ] + + # Mock sinter to return intersection (records with ANY category) + mock_glide_client.sunion.return_value = {"record-1", "record-2", "record-3", "record-4"} + + # Mock record data + mock_glide_client.hgetall.side_effect = [ + { + "id": "record-1", + "content": "Content 1", + "scope": "/test", + "categories": '["planning"]', + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + { + "id": "record-2", + "content": "Content 2", + "scope": "/test", + "categories": '["planning", "execution"]', + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + + { + "id": "record-3", + "content": "Content 3", + "scope": "/test", + "categories": '["planning", "execution"]', + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + { + "id": "record-4", + "content": "Content 4", + "scope": "/test", + "categories": '["execution"]', + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + ] + + # Delete by categories (OR logic - any record with planning OR execution) + count = await 
valkey_storage.adelete(categories=["planning", "execution"]) + + # Verify correct count returned + assert count == 4 + + # Verify records were deleted + assert mock_glide_client.delete.call_count == 4 + + + @pytest.mark.asyncio + async def test_delete_by_older_than( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deleting records older than a specific datetime.""" + cutoff_date = datetime(2024, 1, 15, 0, 0, 0) + cutoff_timestamp = cutoff_date.timestamp() + + # Mock scan to return all scope keys + mock_glide_client.scan.return_value = ( + b"0", # cursor + [b"scope:/test"], + ) + + # Mock custom_command for ZRANGEBYSCORE to return old records + mock_glide_client.custom_command.return_value = ["record-1", "record-2"] + + # Mock record data + mock_glide_client.hgetall.side_effect = [ + { + b"id": b"record-1", + b"content": b"Old content 1", + b"scope": b"/test", + b"categories": b"[]", + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + b"last_accessed": b"2024-01-01T11:00:00", + b"embedding": b"", + b"source": b"", + b"private": b"false", + }, + { + b"id": b"record-2", + b"content": b"Old content 2", + b"scope": b"/test", + b"categories": b"[]", + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-10T10:00:00", + b"last_accessed": b"2024-01-10T11:00:00", + b"embedding": b"", + b"source": b"", + b"private": b"false", + }, + ] + + # Delete records older than cutoff + count = await valkey_storage.adelete(older_than=cutoff_date) + + # Verify correct count returned + assert count == 2 + + # Verify scan was called + mock_glide_client.scan.assert_called() + + # Verify custom_command was called for ZRANGEBYSCORE + mock_glide_client.custom_command.assert_called_once() + custom_cmd_call = mock_glide_client.custom_command.call_args + # Check that ZRANGEBYSCORE command was used + assert custom_cmd_call[0][0][0] == b"ZRANGEBYSCORE" + + # Verify records were deleted + assert 
mock_glide_client.delete.call_count == 2 + + @pytest.mark.asyncio + async def test_delete_by_metadata_filter( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deleting records by metadata filter.""" + # Mock smembers to return records matching each metadata criterion + mock_glide_client.smembers.side_effect = [ + {"record-1", "record-2", "record-3"}, # metadata:agent_id:agent-1 + {"record-1", "record-2"}, # metadata:priority:high + ] + + # Mock record data (only record-1 and record-2 match both criteria) + mock_glide_client.hgetall.side_effect = [ + { + "id": "record-1", + "content": "Content 1", + "scope": "/test", + "categories": "[]", + "metadata": '{"agent_id": "agent-1", "priority": "high"}', + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + { + "id": "record-2", + "content": "Content 2", + "scope": "/test", + "categories": "[]", + "metadata": '{"agent_id": "agent-1", "priority": "high"}', + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + ] + + # Delete by metadata filter (AND logic - both criteria must match) + count = await valkey_storage.adelete( + metadata_filter={"agent_id": "agent-1", "priority": "high"} + ) + + # Verify correct count returned (only records matching both criteria) + assert count == 2 + + # Verify smembers was called for each metadata criterion + assert mock_glide_client.smembers.call_count == 2 + + # Verify records were deleted + assert mock_glide_client.delete.call_count == 2 + + @pytest.mark.asyncio + async def test_delete_with_combined_criteria( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deleting records with combined criteria (AND logic).""" + # Mock scan for scope filtering + mock_glide_client.scan.return_value 
= ( + b"0", # cursor (as bytes) + [b"scope:/agent/task1", b"scope:/agent/task2"], + ) + + # Mock custom_command for ZRANGE calls (used by _find_records_by_scope) + mock_glide_client.custom_command.side_effect = [ + ["record-1", "record-2", "record-3"], # ZRANGE scope:/agent/task1 + ["record-4"], # ZRANGE scope:/agent/task2 + ] + + # Mock smembers for category filtering (returns records with planning category) + # Only record-1 and record-2 have planning category (not record-4) + mock_glide_client.smembers.return_value = {"record-1", "record-2"} + + # The AND logic will intersect scope records (1,2,3,4) with category records (1,2) + # Result: record-1 and record-2 (both in /agent scope AND have planning category) + # Mock record data for the 2 matching records (for _fetch_records_for_deletion) + mock_glide_client.hgetall.side_effect = [ + { + b"id": b"record-1", + b"content": b"Content 1", + b"scope": b"/agent/task1", + b"categories": b'["planning"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + b"last_accessed": b"2024-01-01T11:00:00", + b"embedding": b"", + b"source": b"", + b"private": b"false", + }, + { + b"id": b"record-2", + b"content": b"Content 2", + b"scope": b"/agent/task1", + b"categories": b'["planning"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + b"last_accessed": b"2024-01-01T11:00:00", + b"embedding": b"", + b"source": b"", + b"private": b"false", + }, + ] + + # Mock delete, zrem, srem operations + mock_glide_client.delete.return_value = 1 + mock_glide_client.zrem.return_value = 1 + mock_glide_client.srem.return_value = 1 + + # Delete with combined criteria: scope_prefix AND categories + count = await valkey_storage.adelete( + scope_prefix="/agent", categories=["planning"] + ) + + # Verify correct count (only records in /agent scope AND with planning category) + assert count == 2 + + # Verify both scope and category filtering were used + 
mock_glide_client.scan.assert_called() + mock_glide_client.smembers.assert_called() + + # Verify only matching records were deleted + assert mock_glide_client.delete.call_count == 2 + + @pytest.mark.asyncio + async def test_delete_returns_correct_count( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that delete returns the correct count of deleted records.""" + # Mock record data + mock_glide_client.hgetall.side_effect = [ + { + "id": "record-1", + "content": "Content 1", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + + { + "id": "record-2", + "content": "Content 2", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + { + "id": "record-3", + "content": "Content 3", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + }, + ] + + # Delete 3 records + count = await valkey_storage.adelete( + record_ids=["record-1", "record-2", "record-3"] + ) + + # Verify count is exactly 3 + assert count == 3 + + @pytest.mark.asyncio + async def test_delete_with_no_matching_records_returns_zero( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that delete returns 0 when no records match criteria.""" + # Mock scan to return no matching scopes + mock_glide_client.scan.return_value = (b"0", []) + + # Delete with scope that doesn't exist + count = await valkey_storage.adelete(scope_prefix="/nonexistent") + + # Verify count is 0 + assert count == 0 + + # Verify no delete 
operations were performed + mock_glide_client.delete.assert_not_called() + + + @pytest.mark.asyncio + async def test_delete_removes_from_all_indexes( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that delete removes records from all index structures.""" + # Mock record with multiple categories and metadata + mock_glide_client.hgetall.return_value = { + "id": "indexed-record", + "content": "Content", + "scope": "/agent/task", + "categories": '["cat1", "cat2", "cat3"]', + "metadata": '{"key1": "value1", "key2": "value2", "key3": "value3"}', + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Delete the record + count = await valkey_storage.adelete(record_ids=["indexed-record"]) + + # Verify record was deleted + assert count == 1 + mock_glide_client.delete.assert_called_once_with(["record:indexed-record"]) + + # Verify removed from scope index + mock_glide_client.zrem.assert_called_once_with( + "scope:/agent/task", ["indexed-record"] + ) + + # Verify removed from all category indexes + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + category_srem_calls = [call for call in srem_calls if "category:" in str(call)] + assert len(category_srem_calls) == 3 + + category_keys = [call[0][0] for call in category_srem_calls] + assert "category:cat1" in category_keys + assert "category:cat2" in category_keys + assert "category:cat3" in category_keys + + # Verify removed from all metadata indexes + metadata_srem_calls = [call for call in srem_calls if "metadata:" in str(call)] + assert len(metadata_srem_calls) == 3 + + metadata_keys = [call[0][0] for call in metadata_srem_calls] + assert "metadata:key1:value1" in metadata_keys + assert "metadata:key2:value2" in metadata_keys + assert "metadata:key3:value3" in metadata_keys + + + @pytest.mark.asyncio + async def 
test_delete_with_empty_categories_list( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test delete with empty categories list removes no category indexes.""" + # Mock record with no categories + mock_glide_client.hgetall.return_value = { + "id": "no-categories-record", + "content": "Content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Delete the record + count = await valkey_storage.adelete(record_ids=["no-categories-record"]) + + # Verify record was deleted + assert count == 1 + + # Verify no category index removals + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + category_srem_calls = [call for call in srem_calls if "category:" in str(call)] + assert len(category_srem_calls) == 0 + + @pytest.mark.asyncio + async def test_delete_with_empty_metadata_dict( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test delete with empty metadata dict removes no metadata indexes.""" + # Mock record with no metadata + mock_glide_client.hgetall.return_value = { + "id": "no-metadata-record", + "content": "Content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Delete the record + count = await valkey_storage.adelete(record_ids=["no-metadata-record"]) + + + # Verify record was deleted + assert count == 1 + + # Verify no metadata index removals + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + metadata_srem_calls = [call for call in srem_calls if "metadata:" in str(call)] + assert len(metadata_srem_calls) == 0 + + @pytest.mark.asyncio + async def test_delete_with_numeric_metadata_values( 
+ self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test delete with numeric metadata values converts to strings.""" + # Mock record with numeric metadata + mock_glide_client.hgetall.return_value = { + "id": "numeric-metadata-record", + "content": "Content", + "scope": "/test", + "categories": "[]", + "metadata": '{"count": 42, "score": 3.14, "active": true}', + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Delete the record + count = await valkey_storage.adelete(record_ids=["numeric-metadata-record"]) + + # Verify record was deleted + assert count == 1 + + # Verify metadata indexes were removed with string-converted values + srem_calls = [call for call in mock_glide_client.srem.call_args_list] + metadata_srem_calls = [call for call in srem_calls if "metadata:" in str(call)] + metadata_keys = [call[0][0] for call in metadata_srem_calls] + + assert "metadata:count:42" in metadata_keys + assert "metadata:score:3.14" in metadata_keys + assert "metadata:active:True" in metadata_keys + + @pytest.mark.asyncio + async def test_delete_handles_missing_record_data_gracefully( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test delete handles missing record data gracefully.""" + # Mock hgetall to return empty dict (record doesn't exist) + mock_glide_client.hgetall.return_value = {} + + # Delete non-existent record + count = await valkey_storage.adelete(record_ids=["non-existent-record"]) + + + # Verify count is 0 (record not found) + assert count == 0 + + # Verify no delete operations were performed + mock_glide_client.delete.assert_not_called() + + @pytest.mark.asyncio + async def test_delete_with_no_criteria_returns_zero( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test delete with no criteria specified returns 0.""" + # Delete with no 
criteria + count = await valkey_storage.adelete() + + # Verify count is 0 + assert count == 0 + + # Verify no operations were performed + mock_glide_client.delete.assert_not_called() + mock_glide_client.scan.assert_not_called() + + @pytest.mark.asyncio + async def test_delete_with_malformed_record_data( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test delete handles malformed record data gracefully.""" + # Mock record with malformed JSON + mock_glide_client.hgetall.return_value = { + "id": "malformed-record", + "content": "Content", + "scope": "/test", + "categories": "not valid json", + "metadata": "{invalid json}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + # Delete should not raise an error + count = await valkey_storage.adelete(record_ids=["malformed-record"]) + + # Verify record was still deleted (best effort) + assert count == 1 + mock_glide_client.delete.assert_called_once() + + def test_delete_sync_wrapper( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that sync delete wrapper calls async implementation.""" + # Mock record data + mock_glide_client.hgetall.return_value = { + "id": "sync-delete-record", + "content": "Content", + "scope": "/test", + "categories": "[]", + "metadata": "{}", + "importance": "0.5", + "created_at": "2024-01-01T10:00:00", + "last_accessed": "2024-01-01T11:00:00", + "embedding": b"", + "source": "", + "private": "false", + } + + + # Call sync delete + count = valkey_storage.delete(record_ids=["sync-delete-record"]) + + # Verify async operation was called + assert count == 1 + mock_glide_client.delete.assert_called_once() + + @pytest.mark.asyncio + async def test_delete_with_special_characters_in_scope( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test delete with special characters 
in scope path.""" + # Mock scan to return scope with special characters + mock_glide_client.scan.return_value = ( + b"0", + [b"scope:/agent:task/sub-task"], + ) + + # Mock custom_command for ZRANGE to return record IDs + mock_glide_client.custom_command.return_value = ["record-1"] + + # Mock record data + mock_glide_client.hgetall.return_value = { + b"id": b"record-1", + b"content": b"Content", + b"scope": b"/agent:task/sub-task", + b"categories": b"[]", + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + b"last_accessed": b"2024-01-01T11:00:00", + b"embedding": b"", + b"source": b"", + b"private": b"false", + } + + # Delete by scope with special characters + count = await valkey_storage.adelete(scope_prefix="/agent:task") + + # Verify record was deleted + assert count == 1 + mock_glide_client.delete.assert_called_once() + + @pytest.mark.asyncio + async def test_delete_multiple_records_in_single_scope( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test deleting multiple records in a single scope.""" + # Mock scan to return one scope + mock_glide_client.scan.return_value = (b"0", [b"scope:/test"]) + + # Mock custom_command for ZRANGE to return multiple record IDs + mock_glide_client.custom_command.return_value = [ + "record-1", + "record-2", + "record-3", + "record-4", + "record-5", + ] + + + # Mock record data for all records + mock_glide_client.hgetall.side_effect = [ + { + b"id": f"record-{i}".encode(), + b"content": f"Content {i}".encode(), + b"scope": b"/test", + b"categories": b"[]", + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + b"last_accessed": b"2024-01-01T11:00:00", + b"embedding": b"", + b"source": b"", + b"private": b"false", + } + for i in range(1, 6) + ] + + # Delete all records in scope + count = await valkey_storage.adelete(scope_prefix="/test") + + # Verify all 5 records were deleted + assert count == 5 + assert 
# NOTE(review): this region was reconstructed from a whitespace-mangled
# unified diff (every logical line had its leading '+' fused into the text).
# The enclosing test class for the first two methods begins above this
# chunk, and the leading ``assert`` of the first comparison appears to have
# been cut off at the chunk boundary — verify both against the full file.
        mock_glide_client.delete.call_count == 5

    @pytest.mark.asyncio
    async def test_delete_with_root_scope(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test deleting records with root scope '/'."""
        # Mock scan to return all scopes
        mock_glide_client.scan.return_value = (
            b"0",
            [b"scope:/", b"scope:/agent", b"scope:/task"],
        )

        # Mock custom_command for ZRANGE calls
        mock_glide_client.custom_command.side_effect = [
            ["record-1"],  # ZRANGE scope:/
            ["record-2"],  # ZRANGE scope:/agent
            ["record-3"],  # ZRANGE scope:/task
        ]

        # Mock record data.
        # NOTE(review): the three hashes deliberately mix bytes keys/values
        # (record-1), bytes and str (record-2), and plain str (record-3) —
        # presumably to exercise key normalisation in _dict_to_record;
        # confirm this mixing is intentional and not a transcription slip.
        mock_glide_client.hgetall.side_effect = [
            {
                b"id": b"record-1",
                b"content": b"Content 1",
                b"scope": b"/",
                b"categories": b"[]",
                b"metadata": b"{}",
                b"importance": b"0.5",
                b"created_at": b"2024-01-01T10:00:00",
                b"last_accessed": b"2024-01-01T11:00:00",
                b"embedding": b"",
                b"source": b"",
                b"private": b"false",
            },
            {
                b"id": b"record-2",
                b"content": b"Content 2",
                b"scope": b"/agent",
                b"categories": b"[]",
                b"metadata": b"{}",
                b"importance": b"0.5",
                "created_at": "2024-01-01T10:00:00",
                "last_accessed": "2024-01-01T11:00:00",
                "embedding": b"",
                "source": "",
                "private": "false",
            },
            {
                "id": "record-3",
                "content": "Content 3",
                "scope": "/task",
                "categories": "[]",
                "metadata": "{}",
                "importance": "0.5",
                "created_at": "2024-01-01T10:00:00",
                "last_accessed": "2024-01-01T11:00:00",
                "embedding": b"",
                "source": "",
                "private": "false",
            },
        ]

        # Delete all records (root scope matches all)
        count = await valkey_storage.adelete(scope_prefix="/")

        # Verify all records were deleted
        assert count == 3
        assert mock_glide_client.delete.call_count == 3

    @pytest.mark.asyncio
    async def test_delete_preserves_unmatched_records(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test that delete only removes matching records, not all records."""
        # Mock scan to return multiple scopes
        mock_glide_client.scan.return_value = (
            b"0",
            [b"scope:/agent", b"scope:/task"],
        )

        # Mock custom_command for ZRANGE - only /agent scope matches prefix
        mock_glide_client.custom_command.side_effect = [
            ["record-1", "record-2"],  # ZRANGE scope:/agent (matches)
            [],  # ZRANGE scope:/task (doesn't match prefix, but still scanned)
        ]

        # Mock record data only for matching records
        mock_glide_client.hgetall.side_effect = [
            {
                b"id": b"record-1",
                b"content": b"Content 1",
                b"scope": b"/agent",
                b"categories": b"[]",
                b"metadata": b"{}",
                b"importance": b"0.5",
                b"created_at": b"2024-01-01T10:00:00",
                b"last_accessed": b"2024-01-01T11:00:00",
                b"embedding": b"",
                b"source": b"",
                b"private": b"false",
            },
            {
                b"id": b"record-2",
                b"content": b"Content 2",
                b"scope": b"/agent",
                b"categories": b"[]",
                b"metadata": b"{}",
                b"importance": b"0.5",
                b"created_at": b"2024-01-01T10:00:00",
                b"last_accessed": b"2024-01-01T11:00:00",
                b"embedding": b"",
                b"source": b"",
                b"private": b"false",
            },
        ]

        # Delete only records in /agent scope
        count = await valkey_storage.adelete(scope_prefix="/agent")

        # Verify only 2 records were deleted (not records in /task)
        assert count == 2
        assert mock_glide_client.delete.call_count == 2

        # Verify only /agent records were deleted.
        # delete() receives a list of keys, so call[0][0] is that list and
        # call[0][0][0] is its first (and only) key.
        delete_calls = [call[0][0][0] for call in mock_glide_client.delete.call_args_list]
        assert "record:record-1" in delete_calls
        assert "record:record-2" in delete_calls


class TestValkeyStorageIndexing:
    """Tests for ValkeyStorage indexing system (_update_indexes and _remove_from_indexes)."""

    @pytest.mark.asyncio
    async def test_update_indexes_with_simple_scope_path(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test scope index updates with a simple scope path."""
        record_id = "test-record-123"
        scope = "/agent/task"
        categories = ["planning"]
        metadata = {"agent_id": "agent-1"}
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify scope index was updated
        mock_glide_client.zadd.assert_called_once_with(
            "scope:/agent/task", {record_id: timestamp}
        )

        # Verify category index was updated
        mock_glide_client.sadd.assert_any_call("category:planning", [record_id])

        # Verify metadata index was updated
        mock_glide_client.sadd.assert_any_call("metadata:agent_id:agent-1", [record_id])

    @pytest.mark.asyncio
    async def test_update_indexes_with_root_scope(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test scope index updates with root scope '/'."""
        record_id = "root-record"
        scope = "/"
        categories: list[str] = []
        metadata: dict[str, str] = {}
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify root scope index was created correctly
        mock_glide_client.zadd.assert_called_once_with(
            "scope:/", {record_id: timestamp}
        )

        # Verify no category or metadata indexes were created
        assert mock_glide_client.sadd.call_count == 0

    @pytest.mark.asyncio
    async def test_update_indexes_with_nested_scope_path(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test scope index updates with deeply nested scope path."""
        record_id = "nested-record"
        scope = "/agent/task/subtask/step"
        categories: list[str] = []
        metadata: dict[str, str] = {}
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify nested scope index was created correctly
        mock_glide_client.zadd.assert_called_once_with(
            "scope:/agent/task/subtask/step", {record_id: timestamp}
        )

    @pytest.mark.asyncio
    async def test_update_indexes_with_scope_containing_special_characters(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test scope index updates with special characters in scope path."""
        record_id = "special-scope-record"
        scope = "/agent:123/task-456/step_789"
        categories: list[str] = []
        metadata: dict[str, str] = {}
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify scope with special characters is handled correctly
        mock_glide_client.zadd.assert_called_once_with(
            "scope:/agent:123/task-456/step_789", {record_id: timestamp}
        )

    @pytest.mark.asyncio
    async def test_update_indexes_with_multiple_categories(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test category index updates with multiple categories."""
        record_id = "multi-category-record"
        scope = "/test"
        categories = ["planning", "execution", "review", "analysis"]
        metadata: dict[str, str] = {}
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify all category indexes were updated
        assert mock_glide_client.sadd.call_count == 4
        mock_glide_client.sadd.assert_any_call("category:planning", [record_id])
        mock_glide_client.sadd.assert_any_call("category:execution", [record_id])
        mock_glide_client.sadd.assert_any_call("category:review", [record_id])
        mock_glide_client.sadd.assert_any_call("category:analysis", [record_id])

    @pytest.mark.asyncio
    async def test_update_indexes_with_empty_categories(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test category index updates with empty categories list."""
        record_id = "no-categories-record"
        scope = "/test"
        categories: list[str] = []
        metadata: dict[str, str] = {}
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify scope index was updated
        mock_glide_client.zadd.assert_called_once()

        # Verify no category indexes were created
        assert mock_glide_client.sadd.call_count == 0

    @pytest.mark.asyncio
    async def test_update_indexes_with_categories_containing_special_characters(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test category index updates with special characters in category names."""
        record_id = "special-category-record"
        scope = "/test"
        categories = ["category:with:colons", "category-with-dashes", "category_with_underscores"]
        metadata: dict[str, str] = {}
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify all category indexes were created with special characters preserved.
        # NOTE(review): colons inside category names make index keys ambiguous
        # with the "category:<name>" prefix scheme — worth confirming upstream.
        assert mock_glide_client.sadd.call_count == 3
        mock_glide_client.sadd.assert_any_call("category:category:with:colons", [record_id])
        mock_glide_client.sadd.assert_any_call("category:category-with-dashes", [record_id])
        mock_glide_client.sadd.assert_any_call("category:category_with_underscores", [record_id])

    @pytest.mark.asyncio
    async def test_update_indexes_with_string_metadata_values(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test metadata index updates with string values."""
        record_id = "string-metadata-record"
        scope = "/test"
        categories: list[str] = []
        metadata = {
            "agent_id": "agent-1",
            "task_type": "planning",
            "status": "active",
        }
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify all metadata indexes were created
        assert mock_glide_client.sadd.call_count == 3
        mock_glide_client.sadd.assert_any_call("metadata:agent_id:agent-1", [record_id])
        mock_glide_client.sadd.assert_any_call("metadata:task_type:planning", [record_id])
        mock_glide_client.sadd.assert_any_call("metadata:status:active", [record_id])

    @pytest.mark.asyncio
    async def test_update_indexes_with_numeric_metadata_values(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test metadata index updates with numeric values (converted to strings)."""
        record_id = "numeric-metadata-record"
        scope = "/test"
        categories: list[str] = []
        metadata = {
            "count": 42,
            "score": 3.14159,
            "priority": 1,
        }
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify metadata values are converted to strings
        assert mock_glide_client.sadd.call_count == 3
        mock_glide_client.sadd.assert_any_call("metadata:count:42", [record_id])
        mock_glide_client.sadd.assert_any_call("metadata:score:3.14159", [record_id])
        mock_glide_client.sadd.assert_any_call("metadata:priority:1", [record_id])

    @pytest.mark.asyncio
    async def test_update_indexes_with_boolean_metadata_values(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test metadata index updates with boolean values (converted to strings)."""
        record_id = "boolean-metadata-record"
        scope = "/test"
        categories: list[str] = []
        metadata = {
            "is_active": True,
            "is_complete": False,
        }
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify boolean values are converted to strings
        assert mock_glide_client.sadd.call_count == 2
        mock_glide_client.sadd.assert_any_call("metadata:is_active:True", [record_id])
        mock_glide_client.sadd.assert_any_call("metadata:is_complete:False", [record_id])

    @pytest.mark.asyncio
    async def test_update_indexes_with_empty_metadata(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test metadata index updates with empty metadata dict."""
        record_id = "no-metadata-record"
        scope = "/test"
        categories: list[str] = []
        metadata: dict[str, str] = {}
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify scope index was updated
        mock_glide_client.zadd.assert_called_once()

        # Verify no metadata indexes were created
        assert mock_glide_client.sadd.call_count == 0

    @pytest.mark.asyncio
    async def test_update_indexes_with_metadata_containing_special_characters(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test metadata index updates with special characters in keys and values."""
        record_id = "special-metadata-record"
        scope = "/test"
        categories: list[str] = []
        metadata = {
            "key:with:colons": "value:with:colons",
            "key-with-dashes": "value-with-dashes",
            "key_with_underscores": "value_with_underscores",
            "key with spaces": "value with spaces",
        }
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify all metadata indexes were created with special characters preserved
        assert mock_glide_client.sadd.call_count == 4
        mock_glide_client.sadd.assert_any_call(
            "metadata:key:with:colons:value:with:colons", [record_id]
        )
        mock_glide_client.sadd.assert_any_call(
            "metadata:key-with-dashes:value-with-dashes", [record_id]
        )
        mock_glide_client.sadd.assert_any_call(
            "metadata:key_with_underscores:value_with_underscores", [record_id]
        )
        mock_glide_client.sadd.assert_any_call(
            "metadata:key with spaces:value with spaces", [record_id]
        )

    @pytest.mark.asyncio
    async def test_update_indexes_with_mixed_data_types_in_metadata(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test metadata index updates with mixed data types."""
        record_id = "mixed-metadata-record"
        scope = "/test"
        categories: list[str] = []
        metadata = {
            "string_key": "string_value",
            "int_key": 123,
            "float_key": 45.67,
            "bool_key": True,
        }
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify all metadata indexes were created with proper type conversion
        assert mock_glide_client.sadd.call_count == 4
        mock_glide_client.sadd.assert_any_call("metadata:string_key:string_value", [record_id])
        mock_glide_client.sadd.assert_any_call("metadata:int_key:123", [record_id])
        mock_glide_client.sadd.assert_any_call("metadata:float_key:45.67", [record_id])
        mock_glide_client.sadd.assert_any_call("metadata:bool_key:True", [record_id])

    @pytest.mark.asyncio
    async def test_update_indexes_with_all_fields_populated(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test index updates with scope, categories, and metadata all populated."""
        record_id = "full-record"
        scope = "/agent/task"
        categories = ["planning", "execution"]
        metadata = {"agent_id": "agent-1", "priority": "high"}
        timestamp = 1704067200.0

        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Verify scope index was updated
        mock_glide_client.zadd.assert_called_once_with(
            "scope:/agent/task", {record_id: timestamp}
        )

        # Verify all indexes were updated (2 categories + 2 metadata = 4 sadd calls)
        assert mock_glide_client.sadd.call_count == 4

    @pytest.mark.asyncio
    async def test_remove_from_indexes_with_simple_scope(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test removing record from indexes with simple scope."""
        record_id = "test-record-123"
        scope = "/agent/task"
        categories = ["planning"]
        metadata = {"agent_id": "agent-1"}

        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Verify record was removed from scope index
        mock_glide_client.zrem.assert_called_once_with("scope:/agent/task", [record_id])

        # Verify record was removed from category index
        mock_glide_client.srem.assert_any_call("category:planning", [record_id])

        # Verify record was removed from metadata index
        mock_glide_client.srem.assert_any_call("metadata:agent_id:agent-1", [record_id])

    @pytest.mark.asyncio
    async def test_remove_from_indexes_with_root_scope(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test removing record from indexes with root scope '/'."""
        record_id = "root-record"
        scope = "/"
        categories: list[str] = []
        metadata: dict[str, str] = {}

        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Verify record was removed from root scope index
        mock_glide_client.zrem.assert_called_once_with("scope:/", [record_id])

        # Verify no category or metadata removals
        assert mock_glide_client.srem.call_count == 0

    @pytest.mark.asyncio
    async def test_remove_from_indexes_with_multiple_categories(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test removing record from multiple category indexes."""
        record_id = "multi-category-record"
        scope = "/test"
        categories = ["planning", "execution", "review"]
        metadata: dict[str, str] = {}

        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Verify record was removed from all category indexes
        assert mock_glide_client.srem.call_count == 3
        mock_glide_client.srem.assert_any_call("category:planning", [record_id])
        mock_glide_client.srem.assert_any_call("category:execution", [record_id])
        mock_glide_client.srem.assert_any_call("category:review", [record_id])

    @pytest.mark.asyncio
    async def test_remove_from_indexes_with_empty_categories(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test removing record with empty categories list."""
        record_id = "no-categories-record"
        scope = "/test"
        categories: list[str] = []
        metadata: dict[str, str] = {}

        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Verify scope removal
        mock_glide_client.zrem.assert_called_once()

        # Verify no category removals
        assert mock_glide_client.srem.call_count == 0

    @pytest.mark.asyncio
    async def test_remove_from_indexes_with_multiple_metadata_entries(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test removing record from multiple metadata indexes."""
        record_id = "multi-metadata-record"
        scope = "/test"
        categories: list[str] = []
        metadata = {
            "agent_id": "agent-1",
            "task_type": "planning",
            "priority": "high",
        }

        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Verify record was removed from all metadata indexes
        assert mock_glide_client.srem.call_count == 3
        mock_glide_client.srem.assert_any_call("metadata:agent_id:agent-1", [record_id])
        mock_glide_client.srem.assert_any_call("metadata:task_type:planning", [record_id])
        mock_glide_client.srem.assert_any_call("metadata:priority:high", [record_id])

    @pytest.mark.asyncio
    async def test_remove_from_indexes_with_empty_metadata(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test removing record with empty metadata dict."""
        record_id = "no-metadata-record"
        scope = "/test"
        categories: list[str] = []
        metadata: dict[str, str] = {}

        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Verify scope removal
        mock_glide_client.zrem.assert_called_once()

        # Verify no metadata removals
        assert mock_glide_client.srem.call_count == 0

    @pytest.mark.asyncio
    async def test_remove_from_indexes_with_numeric_metadata_values(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test removing record with numeric metadata values (converted to strings)."""
        record_id = "numeric-metadata-record"
        scope = "/test"
        categories: list[str] = []
        metadata = {
            "count": 42,
            "score": 3.14,
        }

        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Verify metadata values are converted to strings for removal
        assert mock_glide_client.srem.call_count == 2
        mock_glide_client.srem.assert_any_call("metadata:count:42", [record_id])
        mock_glide_client.srem.assert_any_call("metadata:score:3.14", [record_id])

    @pytest.mark.asyncio
    async def test_remove_from_indexes_with_boolean_metadata_values(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test removing record with boolean metadata values (converted to strings)."""
        record_id = "boolean-metadata-record"
        scope = "/test"
        categories: list[str] = []
        metadata = {
            "is_active": True,
            "is_complete": False,
        }

        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Verify boolean values are converted to strings for removal
        assert mock_glide_client.srem.call_count == 2
        mock_glide_client.srem.assert_any_call("metadata:is_active:True", [record_id])
        mock_glide_client.srem.assert_any_call("metadata:is_complete:False", [record_id])

    @pytest.mark.asyncio
    async def test_remove_from_indexes_with_all_fields_populated(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test removing record from all index structures."""
        record_id = "full-record"
        scope = "/agent/task"
        categories = ["planning", "execution"]
        metadata = {"agent_id": "agent-1", "priority": "high"}

        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Verify scope removal
        mock_glide_client.zrem.assert_called_once_with("scope:/agent/task", [record_id])

        # Verify all removals (2 categories + 2 metadata = 4 srem calls)
        assert mock_glide_client.srem.call_count == 4

    @pytest.mark.asyncio
    async def test_remove_from_indexes_cleans_all_structures(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test that remove_from_indexes cleans all index structures completely."""
        record_id = "cleanup-record"
        scope = "/agent/task/subtask"
        categories = ["planning", "execution", "review"]
        metadata = {
            "agent_id": "agent-1",
            "task_type": "analysis",
            "priority": 5,
            "active": True,
        }

        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Verify scope index cleanup
        mock_glide_client.zrem.assert_called_once_with(
            "scope:/agent/task/subtask", [record_id]
        )

        # Verify category index cleanup (3 categories)
        mock_glide_client.srem.assert_any_call("category:planning", [record_id])
        mock_glide_client.srem.assert_any_call("category:execution", [record_id])
        mock_glide_client.srem.assert_any_call("category:review", [record_id])

        # Verify metadata index cleanup (4 metadata entries)
        mock_glide_client.srem.assert_any_call("metadata:agent_id:agent-1", [record_id])
        mock_glide_client.srem.assert_any_call("metadata:task_type:analysis", [record_id])
        mock_glide_client.srem.assert_any_call("metadata:priority:5", [record_id])
        mock_glide_client.srem.assert_any_call("metadata:active:True", [record_id])

        # Verify total number of removals (3 categories + 4 metadata = 7 srem calls)
        assert mock_glide_client.srem.call_count == 7

    @pytest.mark.asyncio
    async def test_update_then_remove_indexes_consistency(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test that update and remove operations use consistent key naming."""
        record_id = "consistency-record"
        scope = "/test/scope"
        categories = ["cat1", "cat2"]
        metadata = {"key1": "value1", "key2": 123}
        timestamp = 1704067200.0

        # Update indexes
        await valkey_storage._update_indexes(
            record_id, scope, categories, metadata, timestamp
        )

        # Capture the keys used in update (call[0][0] is the index key argument)
        zadd_key = mock_glide_client.zadd.call_args[0][0]
        sadd_keys = [call[0][0] for call in mock_glide_client.sadd.call_args_list]

        # Reset mocks
        mock_glide_client.reset_mock()

        # Remove from indexes
        await valkey_storage._remove_from_indexes(
            record_id, scope, categories, metadata
        )

        # Capture the keys used in remove
        zrem_key = mock_glide_client.zrem.call_args[0][0]
        srem_keys = [call[0][0] for call in mock_glide_client.srem.call_args_list]

        # Verify scope keys match
        assert zadd_key == zrem_key

        # Verify category and metadata keys match
        assert set(sadd_keys) == set(srem_keys)
# --- patch metadata (new-file boundary), preserved verbatim from the diff ---
diff --git a/lib/crewai/tests/memory/storage/test_valkey_storage_errors.py b/lib/crewai/tests/memory/storage/test_valkey_storage_errors.py
new file mode 100644
index 0000000000..44e4f73be3
--- /dev/null
+++ b/lib/crewai/tests/memory/storage/test_valkey_storage_errors.py
@@ -0,0 +1,343 @@
"""Tests for ValkeyStorage error handling."""

from __future__ import annotations

# NOTE(review): asyncio, json, MagicMock and uuid4 do not appear to be used
# in the visible portion of this file — confirm against the full file before
# removing.
import asyncio
import json
from datetime import datetime
from unittest.mock import AsyncMock, MagicMock, patch
from uuid import uuid4

import pytest

from crewai.memory.storage.valkey_storage import ValkeyStorage
from crewai.memory.types import MemoryRecord


@pytest.fixture
def mock_glide_client() -> AsyncMock:
    """Create a mock GlideClient for testing."""
    client = AsyncMock()
    client.custom_command = AsyncMock()
    client.zadd = AsyncMock()
    client.sadd = AsyncMock()
    client.hgetall = AsyncMock(return_value={})
    client.close = AsyncMock()
    return client


@pytest.fixture
def valkey_storage(mock_glide_client: AsyncMock) -> ValkeyStorage:
    """Create a ValkeyStorage instance with mocked client."""
    storage = ValkeyStorage(host="localhost", port=6379, db=0)

    # Mock the client creation to return our mock
    async def mock_create_client() -> AsyncMock:
        storage._client = mock_glide_client
        return mock_glide_client

    storage._get_client = mock_create_client  # type: ignore[method-assign]
    return storage


class TestConnectionErrorRetry:
    """Tests for connection error retry with exponential backoff."""
    # NOTE(review): these methods belong to TestConnectionErrorRetry, whose
    # class header sits just above this chunk; reconstructed from a
    # whitespace-mangled unified diff.
    @pytest.mark.asyncio
    async def test_retry_operation_basic_functionality(
        self, valkey_storage: ValkeyStorage
    ) -> None:
        """Test that _retry_operation can execute a simple callable."""
        async def simple_operation() -> str:
            return "success"

        result = await valkey_storage._retry_operation(
            simple_operation, max_retries=5
        )

        assert result == "success"

    @pytest.mark.asyncio
    async def test_retry_operation_propagates_non_connection_errors(
        self, valkey_storage: ValkeyStorage
    ) -> None:
        """Test that _retry_operation propagates non-connection errors immediately."""
        async def failing_operation() -> None:
            raise ValueError("Not a connection error")

        with pytest.raises(ValueError, match="Not a connection error"):
            await valkey_storage._retry_operation(
                failing_operation, max_retries=5
            )

    @pytest.mark.asyncio
    async def test_connection_error_logs_at_debug_level(
        self, valkey_storage: ValkeyStorage, caplog: pytest.LogCaptureFixture
    ) -> None:
        """Test that connection errors are logged at debug level."""
        # NOTE(review): the name says "debug level" but the assertion checks
        # the final-failure message, which presumably logs at a higher level
        # (caplog captures WARNING+ by default) — confirm against
        # _retry_operation's logging calls.
        from glide import ClosingError

        async def failing_once() -> None:
            raise ClosingError("Connection lost")

        with pytest.raises(ClosingError):
            await valkey_storage._retry_operation(failing_once, max_retries=0)

        # Verify error logging
        assert "Operation failed after 0 retries" in caplog.text

    @pytest.mark.asyncio
    async def test_closing_error_is_caught(
        self, valkey_storage: ValkeyStorage
    ) -> None:
        """Test that ClosingError from glide is caught and retried."""
        from glide import ClosingError

        async def failing_operation() -> None:
            raise ClosingError("Connection lost")

        # Should raise after max_retries
        with pytest.raises(ClosingError, match="Connection lost"):
            await valkey_storage._retry_operation(
                failing_operation, max_retries=0
            )

    @pytest.mark.asyncio
    async def test_connection_error_is_caught(
        self, valkey_storage: ValkeyStorage
    ) -> None:
        """Test that ConnectionError is caught and retried."""
        async def failing_operation() -> None:
            raise ConnectionError("Network error")

        # Should raise after max_retries
        with pytest.raises(ConnectionError, match="Network error"):
            await valkey_storage._retry_operation(
                failing_operation, max_retries=0
            )


class TestSerializationErrors:
    """Tests for serialization error handling."""

    def test_serialization_error_raises_descriptive_exception(
        self, valkey_storage: ValkeyStorage
    ) -> None:
        """Test that serialization errors raise descriptive ValueError."""
        # Create a record with non-serializable metadata
        record = MemoryRecord(
            id="test-id",
            content="test content",
            scope="/test",
            categories=["test"],
            metadata={"bad_key": object()},  # Non-serializable object
            importance=0.5,
            created_at=datetime.now(),
            last_accessed=datetime.now(),
            embedding=[0.1, 0.2, 0.3],
        )

        # Should raise ValueError with descriptive message
        with pytest.raises(ValueError, match="Failed to serialize record test-id"):
            valkey_storage._record_to_dict(record)

    def test_serialization_error_includes_cause(
        self, valkey_storage: ValkeyStorage
    ) -> None:
        """Test that serialization error includes the original exception as cause."""
        # Create a mock record that will fail during JSON serialization
        # We need to bypass Pydantic validation, so we'll patch json.dumps
        record = MemoryRecord(
            id="test-id-2",
            content="test content",
            scope="/test",
            categories=["valid"],
            metadata={"key": "value"},
            importance=0.5,
            created_at=datetime.now(),
            last_accessed=datetime.now(),
            embedding=[0.1, 0.2, 0.3],
        )

        # Patch json.dumps to raise an error
        with patch("json.dumps", side_effect=TypeError("Cannot serialize")):
            with pytest.raises(ValueError) as exc_info:
                valkey_storage._record_to_dict(record)

        # Verify the exception has a cause
        assert exc_info.value.__cause__ is not None
        assert isinstance(exc_info.value.__cause__, TypeError)


class TestDeserializationErrors:
    """Tests for deserialization error handling."""

    def test_deserialization_error_logs_and_returns_none(
        self, valkey_storage: ValkeyStorage, caplog: pytest.LogCaptureFixture
    ) -> None:
        """Test that deserialization errors log error and return None."""
        # Create malformed data (missing required fields)
        malformed_data = {
            "id": "test-id",
            "content": "test content",
            # Missing scope, categories, metadata, etc.
        }

        # Should return None and log error
        result = valkey_storage._dict_to_record(malformed_data)

        assert result is None
        assert "Failed to deserialize record test-id" in caplog.text

    def test_deserialization_with_invalid_json_categories_uses_tag_fallback(
        self, valkey_storage: ValkeyStorage
    ) -> None:
        """Test that non-JSON categories fall back to TAG (comma-separated) parsing."""
        # Create data with non-JSON categories string
        data = {
            "id": "test-id-json",
            "content": "test content",
            "scope": "/test",
            "categories": "not valid json [",  # Not JSON, treated as TAG format
            "metadata": "{}",
            "importance": "0.5",
            "created_at": "2024-01-01T12:00:00",
            "last_accessed": "2024-01-01T12:00:00",
            "source": "",
            "private": "false",
        }

        result = valkey_storage._dict_to_record(data)

        # TAG fallback: comma-split produces the raw string as a single category
        assert result is not None
        assert result.id == "test-id-json"
        assert result.categories == ["not valid json ["]

    def test_deserialization_with_invalid_datetime_returns_none(
        self, valkey_storage: ValkeyStorage, caplog: pytest.LogCaptureFixture
    ) -> None:
        """Test that invalid datetime format returns None."""
        # Create data with invalid datetime
        invalid_data = {
            "id": "test-id-datetime",
            "content": "test content",
            "scope": "/test",
            "categories": '["test"]',
            "metadata": "{}",
            "importance": "0.5",
            "created_at": "not a datetime",  # Invalid datetime
            "last_accessed": "2024-01-01T12:00:00",
            "source": "",
            "private": "false",
        }

        result = valkey_storage._dict_to_record(invalid_data)

        assert result is None
        assert "Failed to deserialize record test-id-datetime" in caplog.text

    def test_deserialization_with_invalid_float_returns_none(
        self, valkey_storage: ValkeyStorage, caplog: pytest.LogCaptureFixture
    ) -> None:
        """Test that invalid float importance returns None."""
        # Create data with invalid float
        invalid_data = {
            "id": "test-id-float",
            "content": "test content",
            "scope": "/test",
            "categories": '["test"]',
            "metadata": "{}",
            "importance": "not a float",  # Invalid float
            "created_at": "2024-01-01T12:00:00",
            "last_accessed": "2024-01-01T12:00:00",
            "source": "",
            "private": "false",
        }

        result = valkey_storage._dict_to_record(invalid_data)

        assert result is None
        assert "Failed to deserialize record test-id-float" in caplog.text

    def test_deserialization_with_bytes_keys_uses_tag_fallback(
        self, valkey_storage: ValkeyStorage
    ) -> None:
        """Test that deserialization handles bytes keys with non-JSON categories via TAG fallback."""
        # Create data with bytes keys (as returned by Valkey)
        bytes_data = {
            b"id": b"test-id-bytes",
            b"content": b"test content",
            b"scope": b"/test",
            b"categories": b"invalid json [",  # Not JSON, treated as TAG format
            b"metadata": b"{}",
            b"importance": b"0.5",
            b"created_at": b"2024-01-01T12:00:00",
            b"last_accessed": b"2024-01-01T12:00:00",
        }

        result = valkey_storage._dict_to_record(bytes_data)

        # TAG fallback: comma-split produces the raw string as a single category
        assert result is not None
        assert result.id == "test-id-bytes"
        assert result.categories == ["invalid json ["]


class TestRetryBehaviorIntegration:
    """Integration tests demonstrating retry behavior patterns."""

    @pytest.mark.asyncio
    async def test_mock_client_operation_with_retry_pattern(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test demonstrating how retry would work with client operations."""
        from glide import ClosingError

        # Mock a client operation that fails once
        mock_glide_client.hgetall.side_effect = [
            ClosingError("Connection lost"),
            {
                b"id": b"test-id",
                b"content": b"test content",
                b"scope": b"/test",
                b"categories": b'["test"]',
                b"metadata": b"{}",
                b"importance": b"0.5",
                b"created_at": b"2024-01-01T12:00:00",
                b"last_accessed": b"2024-01-01T12:00:00",
                b"source": b"",
                b"private": b"false",
                b"embedding": b"",
            },
        ]

        # First call fails, second succeeds
        with pytest.raises(ClosingError):
            await mock_glide_client.hgetall("record:test-id")

        # Second call succeeds
        result = await mock_glide_client.hgetall("record:test-id")
        assert result is not None

    @pytest.mark.asyncio
    async def test_serialization_error_not_retried(
        self, valkey_storage: ValkeyStorage
    ) -> None:
        """Test that serialization errors are not retried (they're not connection errors)."""
        # Create a record with non-serializable data
        record = MemoryRecord(
            id="test-id",
            content="test content",
            scope="/test",
            categories=["test"],
            metadata={"bad": object()},
            importance=0.5,
            created_at=datetime.now(),
            last_accessed=datetime.now(),
            embedding=[0.1, 0.2, 0.3],
        )

        # Serialization error should not be retried
        with pytest.raises(ValueError, match="Failed to serialize"):
            valkey_storage._record_to_dict(record)
# --- patch metadata (new-file boundary), preserved verbatim from the diff ---
diff --git a/lib/crewai/tests/memory/storage/test_valkey_storage_scope.py b/lib/crewai/tests/memory/storage/test_valkey_storage_scope.py
new file mode 100644
index 0000000000..7d964a3d9d
--- /dev/null
+++ b/lib/crewai/tests/memory/storage/test_valkey_storage_scope.py
@@ -0,0 +1,1109 @@
"""Tests for ValkeyStorage scope operations."""

from __future__ import annotations

# NOTE(review): uuid4 (and possibly ScopeInfo) are not used in the visible
# portion of this file — confirm against the full file before removing.
from datetime import datetime
from unittest.mock import AsyncMock
from uuid import uuid4

import pytest

from crewai.memory.storage.valkey_storage import ValkeyStorage
from crewai.memory.types import MemoryRecord, ScopeInfo


@pytest.fixture
def mock_glide_client() -> AsyncMock:
    """Create a mock GlideClient for testing."""
    client = AsyncMock()
    client.custom_command = AsyncMock()
    client.zadd = AsyncMock()
    client.sadd = AsyncMock()
    client.zrem = AsyncMock()
    client.srem = AsyncMock()
    client.hgetall = AsyncMock(return_value={})
    client.scan = AsyncMock()
    client.smembers = AsyncMock(return_value=[])
    client.scard = AsyncMock(return_value=0)
    client.delete = AsyncMock()
    client.close = AsyncMock()
    return client


@pytest.fixture
def valkey_storage(mock_glide_client: AsyncMock) -> ValkeyStorage:
    """Create a ValkeyStorage instance with mocked client."""
    storage = ValkeyStorage(host="localhost", port=6379, db=0)

    # Mock the client creation to return our mock; also cache it on the
    # instance like the real _get_client does.
    async def mock_create_client() -> AsyncMock:
        storage._client = mock_glide_client
        return mock_glide_client

    storage._get_client = mock_create_client  # type: ignore[method-assign]
    return storage


def _record_hash(record_id: str) -> dict[bytes, bytes]:
    """Build the stored Valkey hash for ``record_id``, all keys/values as bytes.

    The trailing number of the record id doubles as the day of the timestamp,
    so ``record-3`` maps to ``Content 3`` created at 2024-01-03T10:00:00.
    This replaces the previously duplicated inline dict literals.
    """
    day = int(record_id.split("-")[-1])
    stamp = f"2024-01-0{day}T10:00:00".encode()
    return {
        b"id": record_id.encode(),
        b"content": f"Content {day}".encode(),
        b"scope": b"/test",
        b"categories": b"[]",
        b"metadata": b"{}",
        b"importance": b"0.5",
        b"created_at": stamp,
        b"last_accessed": stamp,
        b"source": b"",
        b"private": b"false",
        b"embedding": b"",
    }


class TestValkeyStorageListRecords:
    """Tests for list_records operation."""

    @pytest.mark.asyncio
    async def test_list_records_returns_newest_first(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test that list_records returns records ordered by created_at descending."""
        # Scope discovery yields a single scope key.
        mock_glide_client.scan.return_value = (b"0", [b"scope:/test"])

        # ZRANGE reply with the scope's record ids.
        mock_glide_client.custom_command.side_effect = [
            [b"record-1", b"record-2", b"record-3"],
        ]

        known = {"record-1", "record-2", "record-3"}

        def mock_hgetall(key: str) -> dict[bytes, bytes]:
            record_id = key.split(":")[-1]
            return _record_hash(record_id) if record_id in known else {}

        mock_glide_client.hgetall.side_effect = mock_hgetall

        records = await valkey_storage._alist_records(scope_prefix="/test")

        # Newest first: record-3 (2024-01-03) down to record-1 (2024-01-01).
        assert len(records) == 3
        assert records[0].id == "record-3"
        assert records[1].id == "record-2"
        assert records[2].id == "record-1"

    @pytest.mark.asyncio
    async def test_list_records_with_pagination_limit_only(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test pagination with limit only (no offset)."""
        mock_glide_client.scan.return_value = (b"0", [b"scope:/test"])
        mock_glide_client.custom_command.side_effect = [
            [b"record-1", b"record-2", b"record-3", b"record-4", b"record-5"],
        ]
        mock_glide_client.hgetall.side_effect = lambda key: _record_hash(
            key.split(":")[-1]
        )

        records = await valkey_storage._alist_records(scope_prefix="/test", limit=3)

        # Only the three newest records survive the limit.
        assert len(records) == 3
        assert records[0].id == "record-5"
        assert records[1].id == "record-4"
        assert records[2].id == "record-3"

    @pytest.mark.asyncio
    async def test_list_records_with_pagination_offset_only(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test pagination with offset only (default limit)."""
        mock_glide_client.scan.return_value = (b"0", [b"scope:/test"])
        mock_glide_client.custom_command.side_effect = [
            [b"record-1", b"record-2", b"record-3"],
        ]
        mock_glide_client.hgetall.side_effect = lambda key: _record_hash(
            key.split(":")[-1]
        )

        records = await valkey_storage._alist_records(scope_prefix="/test", offset=1)

        # The newest record is skipped by the offset.
        assert len(records) == 2
        assert records[0].id == "record-2"
        assert records[1].id == "record-1"

    @pytest.mark.asyncio
    async def test_list_records_with_pagination_limit_and_offset(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test pagination with both limit and offset."""
        mock_glide_client.scan.return_value = (b"0", [b"scope:/test"])
        mock_glide_client.custom_command.side_effect = [
            [b"record-1", b"record-2", b"record-3", b"record-4", b"record-5"],
        ]
        mock_glide_client.hgetall.side_effect = lambda key: _record_hash(
            key.split(":")[-1]
        )

        records = await valkey_storage._alist_records(
            scope_prefix="/test", limit=2, offset=1
        )

        # Skip 1, take 2: second and third newest.
        assert len(records) == 2
        assert records[0].id == "record-4"
        assert records[1].id == "record-3"

    @pytest.mark.asyncio
    async def test_list_records_with_large_offset(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test pagination with offset beyond available records."""
        mock_glide_client.scan.return_value = (b"0", [b"scope:/test"])
        mock_glide_client.custom_command.side_effect = [
            [b"record-1", b"record-2"],
        ]
        mock_glide_client.hgetall.side_effect = lambda key: _record_hash(
            key.split(":")[-1]
        )

        records = await valkey_storage._alist_records(scope_prefix="/test", offset=10)

        # Offset past the end yields an empty page.
        assert len(records) == 0

    @pytest.mark.asyncio
    async def test_list_records_empty_scope(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test list_records returns empty list for empty scope."""
        mock_glide_client.scan.return_value = (b"0", [b"scope:/empty"])
        # ZRANGE yields no member ids.
        mock_glide_client.custom_command.side_effect = [[]]

        records = await valkey_storage._alist_records(scope_prefix="/empty")

        assert len(records) == 0

    def test_list_records_sync_wrapper(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test that sync list_records wrapper calls async implementation."""
        mock_glide_client.scan.return_value = (b"0", [b"scope:/test"])
        mock_glide_client.custom_command.side_effect = [[b"record-1"]]
        mock_glide_client.hgetall.return_value = _record_hash("record-1")

        records = valkey_storage.list_records(scope_prefix="/test")

        assert len(records) == 1
        assert records[0].id == "record-1"


class TestValkeyStorageGetScopeInfo:
    """Tests for get_scope_info operation."""
+ @pytest.mark.asyncio + async def test_get_scope_info_returns_accurate_counts( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that get_scope_info returns accurate record counts and metadata.""" + # Mock scan to return scope keys + mock_glide_client.scan.side_effect = [ + (b"0", [b"scope:/test", b"scope:/test/sub"]), # First scan + (b"0", [b"scope:/test", b"scope:/test/sub"]), # Second scan for child scopes + ] + + # Mock ZRANGE to return record IDs + mock_glide_client.custom_command.side_effect = [ + [b"record-1", b"record-2"], # Records in /test + [b"record-3"], # Records in /test/sub + ] + + # Mock hgetall to return record data + def mock_hgetall(key: str) -> dict[bytes, bytes]: + if key == "record:record-1": + return { + b"id": b"record-1", + b"content": b"Content 1", + b"scope": b"/test", + b"categories": b'["planning"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + b"last_accessed": b"2024-01-01T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + elif key == "record:record-2": + return { + b"id": b"record-2", + b"content": b"Content 2", + b"scope": b"/test", + b"categories": b'["execution"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-03T10:00:00", + b"last_accessed": b"2024-01-03T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + elif key == "record:record-3": + return { + b"id": b"record-3", + b"content": b"Content 3", + b"scope": b"/test/sub", + b"categories": b'["planning"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-02T10:00:00", + b"last_accessed": b"2024-01-02T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + return {} + + mock_glide_client.hgetall.side_effect = mock_hgetall + + # Get scope info + info = await valkey_storage._aget_scope_info("/test") + + # Verify scope info + assert info.path == "/test" + assert 
info.record_count == 3 # All records in /test and subscopes + assert set(info.categories) == {"execution", "planning"} + assert info.oldest_record == datetime(2024, 1, 1, 10, 0, 0) + assert info.newest_record == datetime(2024, 1, 3, 10, 0, 0) + assert "/test/sub" in info.child_scopes + + @pytest.mark.asyncio + async def test_get_scope_info_returns_accurate_timestamps( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that get_scope_info returns accurate oldest and newest timestamps.""" + # Mock scan to return scope keys + mock_glide_client.scan.side_effect = [ + (b"0", [b"scope:/test"]), # First scan + (b"0", [b"scope:/test"]), # Second scan for child scopes + ] + + # Mock ZRANGE to return record IDs + mock_glide_client.custom_command.side_effect = [ + [b"record-1", b"record-2", b"record-3"], + ] + + # Mock hgetall to return record data with different timestamps + def mock_hgetall(key: str) -> dict[bytes, bytes]: + if key == "record:record-1": + return { + b"id": b"record-1", + b"content": b"Content 1", + b"scope": b"/test", + b"categories": b"[]", + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-15T10:00:00", + b"last_accessed": b"2024-01-15T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + elif key == "record:record-2": + return { + b"id": b"record-2", + b"content": b"Content 2", + b"scope": b"/test", + b"categories": b"[]", + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", # Oldest + b"last_accessed": b"2024-01-01T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + elif key == "record:record-3": + return { + b"id": b"record-3", + b"content": b"Content 3", + b"scope": b"/test", + b"categories": b"[]", + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-20T10:00:00", # Newest + b"last_accessed": b"2024-01-20T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + 
} + return {} + + mock_glide_client.hgetall.side_effect = mock_hgetall + + # Get scope info + info = await valkey_storage._aget_scope_info("/test") + + # Verify timestamps + assert info.oldest_record == datetime(2024, 1, 1, 10, 0, 0) + assert info.newest_record == datetime(2024, 1, 20, 10, 0, 0) + + @pytest.mark.asyncio + async def test_get_scope_info_empty_scope( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test get_scope_info returns empty info for empty scope.""" + # Mock scan to return no matching scopes + mock_glide_client.scan.return_value = (b"0", []) + + # Get scope info for empty scope + info = await valkey_storage._aget_scope_info("/empty") + + # Verify empty scope info + assert info.path == "/empty" + assert info.record_count == 0 + assert info.categories == [] + assert info.oldest_record is None + assert info.newest_record is None + assert info.child_scopes == [] + + @pytest.mark.asyncio + async def test_get_scope_info_with_multiple_categories( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test get_scope_info aggregates categories from all records.""" + # Mock scan to return scope keys + mock_glide_client.scan.side_effect = [ + (b"0", [b"scope:/test"]), # First scan + (b"0", [b"scope:/test"]), # Second scan for child scopes + ] + + # Mock ZRANGE to return record IDs + mock_glide_client.custom_command.side_effect = [ + [b"record-1", b"record-2", b"record-3"], + ] + + # Mock hgetall to return record data with various categories + def mock_hgetall(key: str) -> dict[bytes, bytes]: + if key == "record:record-1": + return { + b"id": b"record-1", + b"content": b"Content 1", + b"scope": b"/test", + b"categories": b'["planning", "execution"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + b"last_accessed": b"2024-01-01T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + elif key == "record:record-2": + return { 
+ b"id": b"record-2", + b"content": b"Content 2", + b"scope": b"/test", + b"categories": b'["review", "planning"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-02T10:00:00", + b"last_accessed": b"2024-01-02T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + elif key == "record:record-3": + return { + b"id": b"record-3", + b"content": b"Content 3", + b"scope": b"/test", + b"categories": b'["analysis"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-03T10:00:00", + b"last_accessed": b"2024-01-03T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + return {} + + mock_glide_client.hgetall.side_effect = mock_hgetall + + # Get scope info + info = await valkey_storage._aget_scope_info("/test") + + # Verify all unique categories are collected and sorted + assert set(info.categories) == {"analysis", "execution", "planning", "review"} + assert info.categories == ["analysis", "execution", "planning", "review"] # Sorted + + def test_get_scope_info_sync_wrapper( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that sync get_scope_info wrapper calls async implementation.""" + # Mock scan to return no matching scopes + mock_glide_client.scan.return_value = (b"0", []) + + # Call sync wrapper + info = valkey_storage.get_scope_info("/test") + + # Verify it works + assert info.path == "/test" + assert info.record_count == 0 + + +class TestValkeyStorageListScopes: + """Tests for list_scopes operation.""" + + @pytest.mark.asyncio + async def test_list_scopes_returns_immediate_children_only( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that list_scopes returns only immediate children, not grandchildren.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", + [ + b"scope:/agent", + b"scope:/agent/task", + b"scope:/agent/task/subtask", + b"scope:/crew", + 
], + ) + + # List scopes under root + scopes = await valkey_storage._alist_scopes("/") + + # Verify only immediate children are returned + assert len(scopes) == 2 + assert "/agent" in scopes + assert "/crew" in scopes + assert "/agent/task" not in scopes # Grandchild not included + + @pytest.mark.asyncio + async def test_list_scopes_with_parent( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test list_scopes with specific parent path.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", + [ + b"scope:/agent", + b"scope:/agent/task", + b"scope:/agent/task/subtask", + b"scope:/agent/memory", + ], + ) + + # List scopes under /agent + scopes = await valkey_storage._alist_scopes("/agent") + + # Verify only immediate children of /agent are returned + assert len(scopes) == 2 + assert "/agent/task" in scopes + assert "/agent/memory" in scopes + assert "/agent/task/subtask" not in scopes # Grandchild not included + + @pytest.mark.asyncio + async def test_list_scopes_returns_sorted_order( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that list_scopes returns scopes in sorted order.""" + # Mock scan to return scope keys in random order + mock_glide_client.scan.return_value = ( + b"0", + [ + b"scope:/zebra", + b"scope:/alpha", + b"scope:/beta", + b"scope:/gamma", + ], + ) + + # List scopes under root + scopes = await valkey_storage._alist_scopes("/") + + # Verify scopes are sorted + assert scopes == ["/alpha", "/beta", "/gamma", "/zebra"] + + @pytest.mark.asyncio + async def test_list_scopes_empty_parent( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test list_scopes returns empty list when parent has no children.""" + # Mock scan to return scope keys that don't match parent + mock_glide_client.scan.return_value = ( + b"0", + [ + b"scope:/agent", + b"scope:/crew", + ], + ) + + # List scopes under /other (no children) + 
scopes = await valkey_storage._alist_scopes("/other") + + # Verify empty list + assert len(scopes) == 0 + + @pytest.mark.asyncio + async def test_list_scopes_with_deep_hierarchy( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test list_scopes with deep scope hierarchy.""" + # Mock scan to return scope keys with deep nesting + mock_glide_client.scan.return_value = ( + b"0", + [ + b"scope:/a", + b"scope:/a/b", + b"scope:/a/b/c", + b"scope:/a/b/c/d", + b"scope:/a/x", + ], + ) + + # List scopes under /a/b + scopes = await valkey_storage._alist_scopes("/a/b") + + # Verify only immediate children are returned + assert len(scopes) == 1 + assert "/a/b/c" in scopes + assert "/a/b/c/d" not in scopes # Grandchild not included + + def test_list_scopes_sync_wrapper( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that sync list_scopes wrapper calls async implementation.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", + [b"scope:/agent", b"scope:/crew"], + ) + + # Call sync wrapper + scopes = valkey_storage.list_scopes("/") + + # Verify it works + assert len(scopes) == 2 + assert "/agent" in scopes + assert "/crew" in scopes + + +class TestValkeyStorageListCategories: + """Tests for list_categories operation.""" + + @pytest.mark.asyncio + async def test_list_categories_global_returns_accurate_counts( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test list_categories returns accurate global category counts.""" + # Mock scan to return category keys + mock_glide_client.scan.return_value = ( + b"0", + [b"category:planning", b"category:execution", b"category:review"], + ) + + # Mock scard to return category counts + def mock_scard(key: str) -> int: + if key == "category:planning": + return 5 + elif key == "category:execution": + return 3 + elif key == "category:review": + return 2 + return 0 + + 
mock_glide_client.scard.side_effect = mock_scard + + # List categories globally + categories = await valkey_storage._alist_categories(scope_prefix=None) + + # Verify category counts + assert categories == {"planning": 5, "execution": 3, "review": 2} + + @pytest.mark.asyncio + async def test_list_categories_with_scope_returns_accurate_counts( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test list_categories with scope filtering returns accurate counts.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", + [b"scope:/test"], + ) + + # Mock ZRANGE to return record IDs + mock_glide_client.custom_command.side_effect = [ + [b"record-1", b"record-2", b"record-3"], + ] + + # Mock hgetall to return record data with categories + def mock_hgetall(key: str) -> dict[bytes, bytes]: + if key == "record:record-1": + return { + b"id": b"record-1", + b"content": b"Content 1", + b"scope": b"/test", + b"categories": b'["planning", "execution"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-01T10:00:00", + b"last_accessed": b"2024-01-01T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + elif key == "record:record-2": + return { + b"id": b"record-2", + b"content": b"Content 2", + b"scope": b"/test", + b"categories": b'["planning"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-02T10:00:00", + b"last_accessed": b"2024-01-02T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + elif key == "record:record-3": + return { + b"id": b"record-3", + b"content": b"Content 3", + b"scope": b"/test", + b"categories": b'["execution"]', + b"metadata": b"{}", + b"importance": b"0.5", + b"created_at": b"2024-01-03T10:00:00", + b"last_accessed": b"2024-01-03T10:00:00", + b"source": b"", + b"private": b"false", + b"embedding": b"", + } + return {} + + mock_glide_client.hgetall.side_effect = mock_hgetall + + # List 
categories in scope + categories = await valkey_storage._alist_categories(scope_prefix="/test") + + # Verify category counts + assert categories == {"planning": 2, "execution": 2} + + @pytest.mark.asyncio + async def test_list_categories_empty_scope( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test list_categories returns empty dict for empty scope.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", + [b"scope:/empty"], + ) + + # Mock ZRANGE to return no record IDs + mock_glide_client.custom_command.side_effect = [ + [], # No records + ] + + # List categories in empty scope + categories = await valkey_storage._alist_categories(scope_prefix="/empty") + + # Verify empty dict + assert categories == {} + + @pytest.mark.asyncio + async def test_list_categories_global_empty( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test list_categories returns empty dict when no categories exist.""" + # Mock scan to return no category keys + mock_glide_client.scan.return_value = (b"0", []) + + # List categories globally + categories = await valkey_storage._alist_categories(scope_prefix=None) + + # Verify empty dict + assert categories == {} + + def test_list_categories_sync_wrapper( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that sync list_categories wrapper calls async implementation.""" + # Mock scan to return category keys + mock_glide_client.scan.return_value = ( + b"0", + [b"category:planning"], + ) + + # Mock scard to return category count + mock_glide_client.scard.return_value = 5 + + # Call sync wrapper + categories = valkey_storage.list_categories(scope_prefix=None) + + # Verify it works + assert categories == {"planning": 5} + + +class TestValkeyStorageCount: + """Tests for count operation.""" + + @pytest.mark.asyncio + async def test_count_all_records_returns_correct_total( + self, valkey_storage: 
ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test count returns correct total count across all scopes.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", + [b"scope:/test1", b"scope:/test2"], + ) + + # Mock ZRANGE to return record IDs + mock_glide_client.custom_command.side_effect = [ + [b"record-1", b"record-2"], # /test1 + [b"record-3", b"record-4", b"record-5"], # /test2 + ] + + # Count all records + count = await valkey_storage._acount(scope_prefix=None) + + # Verify total count + assert count == 5 + + @pytest.mark.asyncio + async def test_count_with_scope_returns_correct_total( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test count with scope filtering returns correct total.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", + [b"scope:/test", b"scope:/test/sub"], + ) + + # Mock ZRANGE to return record IDs + mock_glide_client.custom_command.side_effect = [ + [b"record-1", b"record-2"], # /test + [b"record-3"], # /test/sub + ] + + # Count records in scope + count = await valkey_storage._acount(scope_prefix="/test") + + # Verify count includes subscopes + assert count == 3 + + @pytest.mark.asyncio + async def test_count_empty_scope( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test count returns 0 for empty scope.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", + [b"scope:/empty"], + ) + + # Mock ZRANGE to return no record IDs + mock_glide_client.custom_command.side_effect = [ + [], # No records + ] + + # Count records in empty scope + count = await valkey_storage._acount(scope_prefix="/empty") + + # Verify count is 0 + assert count == 0 + + @pytest.mark.asyncio + async def test_count_deduplicates_records( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test count deduplicates records that appear in multiple 
scopes.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", + [b"scope:/test1", b"scope:/test2"], + ) + + # Mock ZRANGE to return overlapping record IDs + mock_glide_client.custom_command.side_effect = [ + [b"record-1", b"record-2"], # /test1 + [b"record-2", b"record-3"], # /test2 (record-2 appears in both) + ] + + # Count all records + count = await valkey_storage._acount(scope_prefix=None) + + # Verify count deduplicates (3 unique records, not 4) + assert count == 3 + + def test_count_sync_wrapper( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that sync count wrapper calls async implementation.""" + # Mock scan to return scope keys + mock_glide_client.scan.return_value = ( + b"0", + [b"scope:/test"], + ) + + # Mock ZRANGE to return record IDs + mock_glide_client.custom_command.side_effect = [ + [b"record-1", b"record-2"], + ] + + # Call sync wrapper + count = valkey_storage.count(scope_prefix="/test") + + # Verify it works + assert count == 2 + + +class TestValkeyStorageReset: + """Tests for reset operation.""" + + @pytest.mark.asyncio + async def test_reset_clears_all_records( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test reset delegates to adelete to clear all records.""" + # Mock adelete to track if it was called + original_adelete = valkey_storage.adelete + adelete_called = False + adelete_args = None + + async def mock_adelete(*args: object, **kwargs: object) -> int: + nonlocal adelete_called, adelete_args + adelete_called = True + adelete_args = kwargs + return 0 + + valkey_storage.adelete = mock_adelete # type: ignore[method-assign] + + # Reset all records + await valkey_storage._areset(scope_prefix=None) + + # Verify adelete was called with correct arguments + assert adelete_called + assert adelete_args == {"scope_prefix": None} + + # Restore original method + valkey_storage.adelete = original_adelete # type: ignore[method-assign] 
    @pytest.mark.asyncio
    async def test_reset_with_scope_clears_scope_records(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test reset with scope delegates to adelete with scope_prefix."""
        # Mock adelete to track if it was called
        original_adelete = valkey_storage.adelete
        adelete_called = False
        adelete_args = None

        async def mock_adelete(*args: object, **kwargs: object) -> int:
            nonlocal adelete_called, adelete_args
            adelete_called = True
            adelete_args = kwargs
            return 0

        valkey_storage.adelete = mock_adelete  # type: ignore[method-assign]

        # Reset records in scope
        await valkey_storage._areset(scope_prefix="/test")

        # Verify adelete was called with correct arguments
        assert adelete_called
        assert adelete_args == {"scope_prefix": "/test"}

        # Restore original method
        valkey_storage.adelete = original_adelete  # type: ignore[method-assign]

    def test_reset_sync_wrapper(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test that sync reset wrapper calls async implementation."""
        # Mock adelete to track if it was called
        original_adelete = valkey_storage.adelete
        adelete_called = False

        async def mock_adelete(*args: object, **kwargs: object) -> int:
            nonlocal adelete_called
            adelete_called = True
            return 0

        valkey_storage.adelete = mock_adelete  # type: ignore[method-assign]

        # Call sync wrapper
        valkey_storage.reset(scope_prefix="/test")

        # Verify adelete was called
        assert adelete_called

        # Restore original method
        valkey_storage.adelete = original_adelete  # type: ignore[method-assign]
diff --git a/lib/crewai/tests/memory/storage/test_valkey_storage_search.py b/lib/crewai/tests/memory/storage/test_valkey_storage_search.py
new file mode 100644
index 0000000000..3d323a12b1
--- /dev/null
+++ b/lib/crewai/tests/memory/storage/test_valkey_storage_search.py
@@ -0,0 +1,1175 @@
"""Tests for ValkeyStorage vector search operation."""

from __future__ import annotations

from datetime import datetime
from unittest.mock import AsyncMock
# NOTE(review): uuid4 appears unused in this chunk — confirm before removing.
from uuid import uuid4

import pytest

from crewai.memory.storage.valkey_storage import ValkeyStorage
from crewai.memory.types import MemoryRecord


@pytest.fixture
def mock_glide_client() -> AsyncMock:
    """Create a mock GlideClient for testing."""
    client = AsyncMock()
    client.custom_command = AsyncMock()
    client.zadd = AsyncMock()
    client.sadd = AsyncMock()
    client.hgetall = AsyncMock(return_value={})
    client.close = AsyncMock()
    return client


@pytest.fixture
def valkey_storage(mock_glide_client: AsyncMock) -> ValkeyStorage:
    """Create a ValkeyStorage instance with mocked client."""
    storage = ValkeyStorage(host="localhost", port=6379, db=0)

    # Mock the client creation to return our mock
    async def mock_create_client() -> AsyncMock:
        storage._client = mock_glide_client
        return mock_glide_client

    storage._get_client = mock_create_client  # type: ignore[method-assign]
    return storage


def create_mock_ft_search_response(
    records: list[tuple[MemoryRecord, float]]
) -> list[int | str | list[str]]:
    """Create a mock FT.SEARCH response.

    Args:
        records: List of (MemoryRecord, score) tuples to include in response.

    Returns:
        Mock FT.SEARCH response in the format:
        [total_count, doc1_key, [field1, value1, ...], doc2_key, [field2, value2, ...], ...]
    """
    import json

    result: list[int | str | list[str]] = [len(records)]  # Total count

    for record, score in records:
        # Add document key
        result.append(f"record:{record.id}")

        # Add document fields as list [field, value, field, value, ...]
        fields: list[str] = []
        fields.extend(["id", record.id])
        fields.extend(["content", record.content])
        fields.extend(["scope", record.scope])
        fields.extend(["categories", json.dumps(record.categories)])
        fields.extend(["metadata", json.dumps(record.metadata)])
        fields.extend(["importance", str(record.importance)])
        fields.extend(["created_at", record.created_at.isoformat()])
        fields.extend(["last_accessed", record.last_accessed.isoformat()])
        fields.extend(["source", record.source or ""])
        fields.extend(["private", "true" if record.private else "false"])

        # Add score (Valkey Search returns cosine distance, not similarity)
        # Convert similarity to distance: distance = 2 * (1 - similarity)
        distance = 2.0 * (1.0 - score)
        fields.extend(["__score", str(distance)])

        # Add embedding if present
        if record.embedding:
            # Store as mock for testing (actual implementation uses bytes)
            fields.extend(["embedding", json.dumps(record.embedding)])

        result.append(fields)

    return result


class TestValkeyStorageVectorSearch:
    """Tests for ValkeyStorage vector search operation."""

    @pytest.mark.asyncio
    async def test_search_with_no_filters_returns_all_records(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test search with no filters returns all records."""
        # Create test records
        record1 = MemoryRecord(
            id="record-1",
            content="First test record",
            scope="/test",
            categories=["cat1"],
            metadata={"key": "value1"},
            importance=0.8,
            created_at=datetime(2024, 1, 1, 10, 0, 0),
            last_accessed=datetime(2024, 1, 1, 11, 0, 0),
            embedding=[0.1, 0.2, 0.3, 0.4],
        )
        record2 = MemoryRecord(
            id="record-2",
            content="Second test record",
            scope="/test",
            categories=["cat2"],
            metadata={"key": "value2"},
            importance=0.6,
            created_at=datetime(2024, 1, 2, 10, 0, 0),
            last_accessed=datetime(2024, 1, 2, 11, 0, 0),
            embedding=[0.2, 0.3, 0.4, 0.5],
        )

        # Mock FT.INFO to simulate index exists
        # Mock FT.SEARCH to return both records
        mock_glide_client.custom_command.side_effect = [
            {"index_name": "memory_index"},  # FT.INFO response
            create_mock_ft_search_response([
                (record1, 0.95),
                (record2, 0.85),
            ]),  # FT.SEARCH response
        ]

        # Perform search with no filters
        query_embedding = [0.1, 0.2, 0.3, 0.4]
        results = await valkey_storage.asearch(query_embedding, limit=10)

        # Verify FT.SEARCH was called
        ft_search_calls = [
            call for call in mock_glide_client.custom_command.call_args_list
            if call[0][0][0] == "FT.SEARCH"
        ]
        assert len(ft_search_calls) == 1

        # Verify query contains only KNN part (no filters)
        search_cmd = ft_search_calls[0][0][0]
        query = search_cmd[2]
        assert "*=>[KNN 10 @embedding $BLOB AS score]" in query
        assert "@scope" not in query
        assert "@categories" not in query

        # Verify results
        assert len(results) == 2
        assert results[0][0].id == "record-1"
        assert results[0][1] == 0.95
        assert results[1][0].id == "record-2"
        assert results[1][1] == 0.85

    @pytest.mark.asyncio
    async def test_search_with_scope_filter_only(
        self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock
    ) -> None:
        """Test search with scope filter only."""
        # Create test records
        record1 = MemoryRecord(
            id="record-1",
            content="Record in scope",
            scope="/agent/task",
            embedding=[0.1, 0.2, 0.3, 0.4],
        )

        # Mock FT.INFO and FT.SEARCH
        mock_glide_client.custom_command.side_effect = [
            {"index_name": "memory_index"},
            create_mock_ft_search_response([(record1, 0.9)]),
        ]

        # Perform search with scope filter
        query_embedding = [0.1, 0.2, 0.3, 0.4]
        results = await valkey_storage.asearch(
            query_embedding,
            scope_prefix="/agent",
            limit=10
        )

        # Verify FT.SEARCH was called with scope filter
        ft_search_calls = [
            call for call in mock_glide_client.custom_command.call_args_list
            if call[0][0][0] == "FT.SEARCH"
        ]
        assert len(ft_search_calls) == 1

        # Verify query contains scope filter
        search_cmd
= ft_search_calls[0][0][0] + query = search_cmd[2] + assert "(@scope:{/agent*})=>[KNN 10 @embedding $BLOB AS score]" in query + + # Verify results + assert len(results) == 1 + assert results[0][0].id == "record-1" + assert results[0][0].scope == "/agent/task" + + @pytest.mark.asyncio + async def test_search_with_category_filter_only( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with category filter only.""" + # Create test records + record1 = MemoryRecord( + id="record-1", + content="Record with planning category", + scope="/test", + categories=["planning"], + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.88)]), + ] + + # Perform search with category filter + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + categories=["planning", "execution"], + limit=10 + ) + + # Verify FT.SEARCH was called with category filter + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + # Verify query contains category filter with OR logic + search_cmd = ft_search_calls[0][0][0] + query = search_cmd[2] + assert "(@categories:{planning|execution})=>[KNN 10 @embedding $BLOB AS score]" in query + + # Verify results + assert len(results) == 1 + assert results[0][0].id == "record-1" + assert "planning" in results[0][0].categories + + @pytest.mark.asyncio + async def test_search_with_metadata_filter_only( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with metadata filter only.""" + # Create test records + record1 = MemoryRecord( + id="record-1", + content="Record with metadata", + scope="/test", + metadata={"agent_id": "agent-1", "priority": "high"}, + embedding=[0.1, 0.2, 
0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.92)]), + ] + + # Perform search with metadata filter + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + metadata_filter={"agent_id": "agent-1", "priority": "high"}, + limit=10 + ) + + # Verify FT.SEARCH was called with metadata filters + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + # Verify query contains metadata filters (AND logic) + search_cmd = ft_search_calls[0][0][0] + query = search_cmd[2] + # Both metadata filters should be present in parentheses + assert "@agent_id:{agent\\-1}" in query or "@agent_id:{agent-1}" in query + assert "@priority:{high}" in query + assert "=>[KNN 10 @embedding $BLOB AS score]" in query + + # Verify results + assert len(results) == 1 + assert results[0][0].id == "record-1" + assert results[0][0].metadata["agent_id"] == "agent-1" + assert results[0][0].metadata["priority"] == "high" + + @pytest.mark.asyncio + async def test_search_with_combined_filters( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with combined filters (scope + categories + metadata).""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Record matching all filters", + scope="/agent/task", + categories=["planning"], + metadata={"agent_id": "agent-1"}, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.93)]), + ] + + # Perform search with all filters + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + scope_prefix="/agent", + 
categories=["planning"], + metadata_filter={"agent_id": "agent-1"}, + limit=10 + ) + + # Verify FT.SEARCH was called with all filters + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + # Verify query contains all filters + search_cmd = ft_search_calls[0][0][0] + query = search_cmd[2] + # All filters should be present in the query + assert "@scope:{/agent*}" in query + assert "@categories:{planning}" in query + assert "@agent_id:{agent\\-1}" in query or "@agent_id:{agent-1}" in query + assert "=>[KNN 10 @embedding $BLOB AS score]" in query + + # Verify results + assert len(results) == 1 + assert results[0][0].id == "record-1" + + @pytest.mark.asyncio + async def test_search_respects_limit_parameter( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search respects limit parameter.""" + # Create 5 test records + records = [ + ( + MemoryRecord( + id=f"record-{i}", + content=f"Record {i}", + scope="/test", + embedding=[0.1 * i, 0.2 * i, 0.3 * i, 0.4 * i], + ), + 0.9 - (i * 0.1) # Descending scores + ) + for i in range(1, 6) + ] + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response(records[:3]), # Only return 3 records + ] + + # Perform search with limit=3 + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=3) + + # Verify FT.SEARCH was called with correct limit + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + search_cmd = ft_search_calls[0][0][0] + # Verify KNN limit in query + query = search_cmd[2] + assert "=>[KNN 3 @embedding $BLOB AS score]" in query + + # Verify LIMIT clause in command + assert "LIMIT" in search_cmd + limit_idx = 
search_cmd.index("LIMIT") + assert search_cmd[limit_idx + 2] == "3" + + # Verify results respect limit + assert len(results) == 3 + + @pytest.mark.asyncio + async def test_search_respects_min_score_parameter( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search respects min_score parameter.""" + # Create test records with varying scores + record1 = MemoryRecord( + id="record-1", + content="High score record", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + record2 = MemoryRecord( + id="record-2", + content="Medium score record", + scope="/test", + embedding=[0.2, 0.3, 0.4, 0.5], + ) + record3 = MemoryRecord( + id="record-3", + content="Low score record", + scope="/test", + embedding=[0.3, 0.4, 0.5, 0.6], + ) + + # Mock FT.INFO and FT.SEARCH to return all records + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([ + (record1, 0.95), + (record2, 0.75), + (record3, 0.55), + ]), + ] + + # Perform search with min_score=0.7 + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + limit=10, + min_score=0.7 + ) + + # Verify only records with score >= 0.7 are returned + assert len(results) == 2 + assert results[0][0].id == "record-1" + assert results[0][1] == 0.95 + assert results[1][0].id == "record-2" + assert results[1][1] == 0.75 + # record-3 with score 0.55 should be filtered out + + @pytest.mark.asyncio + async def test_search_returns_results_ordered_by_descending_score( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search returns results ordered by descending score.""" + # Create test records (intentionally out of order) + record1 = MemoryRecord( + id="record-1", + content="Medium score", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + record2 = MemoryRecord( + id="record-2", + content="Highest score", + scope="/test", + embedding=[0.2, 0.3, 0.4, 
0.5], + ) + record3 = MemoryRecord( + id="record-3", + content="Lowest score", + scope="/test", + embedding=[0.3, 0.4, 0.5, 0.6], + ) + + # Mock FT.INFO and FT.SEARCH (return in random order) + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([ + (record1, 0.75), + (record2, 0.95), + (record3, 0.55), + ]), + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=10) + + # Verify results are ordered by descending score + assert len(results) == 3 + assert results[0][0].id == "record-2" + assert results[0][1] == 0.95 + assert results[1][0].id == "record-1" + assert results[1][1] == 0.75 + assert results[2][0].id == "record-3" + assert results[2][1] == 0.55 + + # Verify scores are in descending order + for i in range(len(results) - 1): + assert results[i][1] >= results[i + 1][1] + + @pytest.mark.asyncio + async def test_search_with_empty_results( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with no matching results.""" + # Mock FT.INFO and FT.SEARCH to return empty results + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + [0], # Total count = 0 + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=10) + + # Verify empty results + assert len(results) == 0 + + @pytest.mark.asyncio + async def test_search_with_special_characters_in_scope( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with special characters in scope prefix.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Record with special scope", + scope="/agent:task-1", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + 
create_mock_ft_search_response([(record1, 0.9)]), + ] + + # Perform search with scope containing special characters + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + scope_prefix="/agent:task", + limit=10 + ) + + # Verify FT.SEARCH was called with escaped scope + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + search_cmd = ft_search_calls[0][0][0] + query = search_cmd[2] + # Colon should be escaped + assert "@scope:{/agent\\:task*}" in query or "@scope:{/agent:task*}" in query + + @pytest.mark.asyncio + async def test_search_with_special_characters_in_categories( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with special characters in categories.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Record with special category", + scope="/test", + categories=["plan:execute"], + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.9)]), + ] + + # Perform search with category containing special characters + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + categories=["plan:execute"], + limit=10 + ) + + # Verify FT.SEARCH was called with escaped category + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + search_cmd = ft_search_calls[0][0][0] + query = search_cmd[2] + # Colon should be escaped + assert "@categories:{plan\\:execute}" in query or "@categories:{plan:execute}" in query + + @pytest.mark.asyncio + async def test_search_with_numeric_metadata_values( + self, valkey_storage: ValkeyStorage, mock_glide_client: 
AsyncMock + ) -> None: + """Test search with numeric metadata values.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Record with numeric metadata", + scope="/test", + metadata={"count": 42, "score": 3.14}, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.9)]), + ] + + # Perform search with numeric metadata filter + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + metadata_filter={"count": 42, "score": 3.14}, + limit=10 + ) + + # Verify FT.SEARCH was called with string-converted metadata values + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + search_cmd = ft_search_calls[0][0][0] + query = search_cmd[2] + # Numeric values should be converted to strings + assert "@count:{42}" in query + # The decimal point gets escaped, so check for escaped version + assert "@score:{3" in query and "14}" in query + + @pytest.mark.asyncio + async def test_search_with_embedding_blob_parameter( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search passes embedding as BLOB parameter.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Test record", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.9)]), + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=10) + + # Verify FT.SEARCH was called with PARAMS for BLOB + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == 
"FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + search_cmd = ft_search_calls[0][0][0] + # Verify PARAMS section + assert "PARAMS" in search_cmd + params_idx = search_cmd.index("PARAMS") + assert search_cmd[params_idx + 1] == "2" # 2 params (name + value) + assert search_cmd[params_idx + 2] == "BLOB" + # Verify BLOB value is bytes + blob_value = search_cmd[params_idx + 3] + assert isinstance(blob_value, bytes) + + @pytest.mark.asyncio + async def test_search_results_sorted_by_score( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search results are sorted by score (descending) automatically.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Test record", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.9)]), + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=10) + + # Verify FT.SEARCH command structure + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + search_cmd = ft_search_calls[0][0][0] + # Verify no SORTBY clause (vector search results are auto-sorted by score) + assert "SORTBY" not in search_cmd + # Verify LIMIT clause is present + assert "LIMIT" in search_cmd + + @pytest.mark.asyncio + async def test_search_with_return_fields( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search includes RETURN clause with all record fields.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Test record", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": 
"memory_index"}, + create_mock_ft_search_response([(record1, 0.9)]), + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=10) + + # Verify FT.SEARCH includes RETURN clause + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + search_cmd = ft_search_calls[0][0][0] + # Verify RETURN clause + assert "RETURN" in search_cmd + return_idx = search_cmd.index("RETURN") + assert search_cmd[return_idx + 1] == "11" # Number of fields (includes score) + + # Verify all required fields are in RETURN clause + required_fields = [ + "id", "content", "scope", "categories", "metadata", + "importance", "created_at", "last_accessed", "source", "private", + "score", + ] + for field in required_fields: + assert field in search_cmd[return_idx:return_idx + 13] + + @pytest.mark.asyncio + async def test_search_handles_valkey_search_not_available( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search raises error when Valkey Search module is not available.""" + # Mock FT.INFO to fail (index doesn't exist) + # Mock FT.CREATE to fail (Search module not available) + mock_glide_client.custom_command.side_effect = [ + Exception("Unknown Index name"), # FT.INFO fails + Exception("ERR unknown command 'FT.CREATE'"), # FT.CREATE fails + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + + # Verify RuntimeError is raised with descriptive message + with pytest.raises(RuntimeError, match="Valkey Search module is not available"): + await valkey_storage.asearch(query_embedding, limit=10) + + @pytest.mark.asyncio + async def test_search_handles_ft_search_error( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search handles FT.SEARCH errors gracefully.""" + # Mock FT.INFO to succeed + # Mock FT.SEARCH to fail + 
mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, # FT.INFO succeeds + Exception("ERR unknown command 'FT.SEARCH'"), # FT.SEARCH fails + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + + # Verify RuntimeError is raised + with pytest.raises(RuntimeError, match="Valkey Search module is not available"): + await valkey_storage.asearch(query_embedding, limit=10) + + @pytest.mark.asyncio + async def test_search_handles_malformed_ft_search_response( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search handles malformed FT.SEARCH response gracefully.""" + # Mock FT.INFO to succeed + # Mock FT.SEARCH to return malformed response + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + None, # Malformed response (should be a list) + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=10) + + # Verify empty results are returned (graceful handling) + assert len(results) == 0 + + @pytest.mark.asyncio + async def test_search_handles_missing_score_field( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search handles missing score field in results.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Test record", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Create mock response without score field + result: list[int | str | list[str]] = [1] # Total count + result.append(f"record:{record1.id}") + + # Add document fields without __score + fields: list[str] = [] + fields.extend(["id", record1.id]) + fields.extend(["content", record1.content]) + fields.extend(["scope", record1.scope]) + fields.extend(["categories", str(record1.categories)]) + fields.extend(["metadata", str(record1.metadata)]) + fields.extend(["importance", str(record1.importance)]) + fields.extend(["created_at", 
record1.created_at.isoformat()]) + fields.extend(["last_accessed", record1.last_accessed.isoformat()]) + fields.extend(["source", record1.source or ""]) + fields.extend(["private", "true" if record1.private else "false"]) + # No __score field + + result.append(fields) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + result, + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=10) + + # Verify record is returned with default score of 0.0 + assert len(results) == 1 + assert results[0][0].id == "record-1" + assert results[0][1] == 0.0 + + @pytest.mark.asyncio + async def test_search_filters_out_records_with_deserialization_errors( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search filters out records that fail deserialization.""" + # Create one valid record + valid_record = MemoryRecord( + id="valid-record", + content="Valid record", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Create mock response with one valid and one truly invalid record + result: list[int | str | list[str]] = [2] # Total count + + # Add valid record + result.append(f"record:{valid_record.id}") + fields1: list[str] = [] + fields1.extend(["id", valid_record.id]) + fields1.extend(["content", valid_record.content]) + fields1.extend(["scope", valid_record.scope]) + fields1.extend(["categories", str(valid_record.categories)]) + fields1.extend(["metadata", str(valid_record.metadata)]) + fields1.extend(["importance", str(valid_record.importance)]) + fields1.extend(["created_at", valid_record.created_at.isoformat()]) + fields1.extend(["last_accessed", valid_record.last_accessed.isoformat()]) + fields1.extend(["source", valid_record.source or ""]) + fields1.extend(["private", "false"]) + fields1.extend(["__score", "0.1"]) + result.append(fields1) + + # Add truly invalid record (missing required 
fields causes deserialization failure) + result.append("record:invalid-record") + fields2: list[str] = [] + fields2.extend(["id", "invalid-record"]) + # Missing content, scope, and other required fields → KeyError in _dict_to_record + fields2.extend(["__score", "0.2"]) + result.append(fields2) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + result, + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=10) + + # Verify only valid record is returned + assert len(results) == 1 + assert results[0][0].id == "valid-record" + + @pytest.mark.asyncio + async def test_search_converts_cosine_distance_to_similarity( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search converts Valkey Search cosine distance to similarity score.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Test record", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Valkey Search returns cosine distance (0 = identical, 2 = opposite) + # We need to convert to similarity: similarity = 1 - (distance / 2) + # Distance 0.1 -> Similarity 0.95 + # Distance 0.3 -> Similarity 0.85 + # Distance 1.0 -> Similarity 0.5 + + # Create mock response with distance scores + result: list[int | str | list[str]] = [1] + result.append(f"record:{record1.id}") + fields: list[str] = [] + fields.extend(["id", record1.id]) + fields.extend(["content", record1.content]) + fields.extend(["scope", record1.scope]) + fields.extend(["categories", str(record1.categories)]) + fields.extend(["metadata", str(record1.metadata)]) + fields.extend(["importance", str(record1.importance)]) + fields.extend(["created_at", record1.created_at.isoformat()]) + fields.extend(["last_accessed", record1.last_accessed.isoformat()]) + fields.extend(["source", record1.source or ""]) + fields.extend(["private", "false"]) + 
fields.extend(["__score", "0.1"]) # Distance = 0.1 + result.append(fields) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + result, + ] + + # Perform search + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=10) + + # Verify similarity score is correctly converted + assert len(results) == 1 + assert results[0][0].id == "record-1" + # Distance 0.1 -> Similarity = 1 - (0.1 / 2) = 0.95 + assert abs(results[0][1] - 0.95) < 0.01 + + def test_search_sync_wrapper( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test that sync search wrapper calls async implementation.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Test record", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.9)]), + ] + + # Call sync search + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = valkey_storage.search(query_embedding, limit=10) + + # Verify async operations were called + assert mock_glide_client.custom_command.call_count >= 2 + + # Verify results + assert len(results) == 1 + assert results[0][0].id == "record-1" + assert results[0][1] == 0.9 + + @pytest.mark.asyncio + async def test_search_with_multiple_categories_uses_or_logic( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with multiple categories uses OR logic.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Record with one matching category", + scope="/test", + categories=["planning"], + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.9)]), + ] + + 
# Perform search with multiple categories + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + categories=["planning", "execution", "review"], + limit=10 + ) + + # Verify FT.SEARCH was called with OR logic for categories + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + search_cmd = ft_search_calls[0][0][0] + query = search_cmd[2] + # Categories should be joined with | (OR logic) + assert "@categories:{planning|execution|review}" in query + + # Verify record with only one matching category is returned + assert len(results) == 1 + assert results[0][0].id == "record-1" + + @pytest.mark.asyncio + async def test_search_with_multiple_metadata_filters_uses_and_logic( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with multiple metadata filters uses AND logic.""" + # Create test record + record1 = MemoryRecord( + id="record-1", + content="Record matching all metadata", + scope="/test", + metadata={"agent_id": "agent-1", "priority": "high", "status": "active"}, + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.9)]), + ] + + # Perform search with multiple metadata filters + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + metadata_filter={"agent_id": "agent-1", "priority": "high", "status": "active"}, + limit=10 + ) + + # Verify FT.SEARCH was called with AND logic for metadata + ft_search_calls = [ + call for call in mock_glide_client.custom_command.call_args_list + if call[0][0][0] == "FT.SEARCH" + ] + assert len(ft_search_calls) == 1 + + search_cmd = ft_search_calls[0][0][0] + query = search_cmd[2] + # All metadata filters should be present (AND logic) + 
assert "@agent_id:" in query + assert "@priority:" in query + assert "@status:" in query + + # Verify record matching all metadata is returned + assert len(results) == 1 + assert results[0][0].id == "record-1" + + @pytest.mark.asyncio + async def test_search_with_zero_limit_returns_empty( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with limit=0 returns empty results.""" + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + [0], # Empty results + ] + + # Perform search with limit=0 + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch(query_embedding, limit=0) + + # Verify empty results + assert len(results) == 0 + + @pytest.mark.asyncio + async def test_search_with_min_score_one_filters_all( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with min_score=1.0 filters out all non-perfect matches.""" + # Create test records with scores < 1.0 + record1 = MemoryRecord( + id="record-1", + content="High score but not perfect", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([(record1, 0.99)]), + ] + + # Perform search with min_score=1.0 + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + limit=10, + min_score=1.0 + ) + + # Verify all results are filtered out + assert len(results) == 0 + + @pytest.mark.asyncio + async def test_search_with_min_score_zero_returns_all( + self, valkey_storage: ValkeyStorage, mock_glide_client: AsyncMock + ) -> None: + """Test search with min_score=0.0 returns all results.""" + # Create test records with varying scores + record1 = MemoryRecord( + id="record-1", + content="High score", + scope="/test", + embedding=[0.1, 0.2, 0.3, 0.4], + ) + 
record2 = MemoryRecord( + id="record-2", + content="Low score", + scope="/test", + embedding=[0.2, 0.3, 0.4, 0.5], + ) + + # Mock FT.INFO and FT.SEARCH + mock_glide_client.custom_command.side_effect = [ + {"index_name": "memory_index"}, + create_mock_ft_search_response([ + (record1, 0.95), + (record2, 0.05), + ]), + ] + + # Perform search with min_score=0.0 + query_embedding = [0.1, 0.2, 0.3, 0.4] + results = await valkey_storage.asearch( + query_embedding, + limit=10, + min_score=0.0 + ) + + # Verify all results are returned + assert len(results) == 2 + assert results[0][0].id == "record-1" + assert results[1][0].id == "record-2" diff --git a/uv.lock b/uv.lock index 461c859a46..591da97212 100644 --- a/uv.lock +++ b/uv.lock @@ -13,7 +13,7 @@ resolution-markers = [ ] [options] -exclude-newer = "2026-04-22T16:00:00Z" +exclude-newer = "2026-04-23T04:00:00Z" [manifest] members = [ @@ -435,14 +435,15 @@ wheels = [ [[package]] name = "authlib" -version = "1.6.11" +version = "1.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, + { name = "joserfc" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/28/10/b325d58ffe86815b399334a101e63bc6fa4e1953921cb23703b48a0a0220/authlib-1.6.11.tar.gz", hash = "sha256:64db35b9b01aeccb4715a6c9a6613a06f2bd7be2ab9d2eb89edd1dfc7580a38f", size = 165359, upload-time = "2026-04-16T07:22:50.279Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/82/4d0603f30c1b4629b1f091bb266b0d7986434891d6940a8c87f8098db24e/authlib-1.7.0.tar.gz", hash = "sha256:b3e326c9aa9cc3ea95fe7d89fd880722d3608da4d00e8a27e061e64b48d801d5", size = 175890, upload-time = "2026-04-18T11:00:28.559Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/57/2f/55fca558f925a51db046e5b929deb317ddb05afed74b22d89f4eca578980/authlib-1.6.11-py2.py3-none-any.whl", hash = "sha256:c8687a9a26451c51a34a06fa17bb97cb15bba46a6a626755e2d7f50da8bff3e3", size = 244469, upload-time = "2026-04-16T07:22:48.413Z" }, + { 
url = "https://files.pythonhosted.org/packages/ca/48/c954218b2a250e23f178f10167c4173fecb5a75d2c206f0a67ba58006c26/authlib-1.7.0-py2.py3-none-any.whl", hash = "sha256:e36817afb02f6f0b6bf55f150782499ddd6ddf44b402bb055d3263cc65ac9ae0", size = 258779, upload-time = "2026-04-18T11:00:26.64Z" }, ] [[package]] @@ -744,7 +745,7 @@ wheels = [ [[package]] name = "build" -version = "1.4.3" +version = "1.4.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "os_name == 'nt'" }, @@ -753,9 +754,9 @@ dependencies = [ { name = "pyproject-hooks" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3f/16/4b272700dea44c1d2e8ca963ebb3c684efe22b3eba8cfa31c5fdb60de707/build-1.4.3.tar.gz", hash = "sha256:5aa4231ae0e807efdf1fd0623e07366eca2ab215921345a2e38acdd5d0fa0a74", size = 89314, upload-time = "2026-04-10T21:25:40.857Z" } +sdist = { url = "https://files.pythonhosted.org/packages/02/ec/bf5ae0a7e5ab57abe8aabdd0759c971883895d1a20c49ae99f8146840c3c/build-1.4.4.tar.gz", hash = "sha256:f832ae053061f3fb524af812dc94b8b84bac6880cd587630e3b5d91a6a9c1703", size = 89220, upload-time = "2026-04-22T20:53:44.807Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/30/f169e1d8b2071beaf8b97088787e30662b1d8fb82f8c0941d14678c0cbf1/build-1.4.3-py3-none-any.whl", hash = "sha256:1bc22b19b383303de8f2c8554c9a32894a58d3f185fe3756b0b20d255bee9a38", size = 26171, upload-time = "2026-04-10T21:25:39.671Z" }, + { url = "https://files.pythonhosted.org/packages/fa/88/6764e7a109dd84294850741501145da90d13cdeac9d4e614929464a37420/build-1.4.4-py3-none-any.whl", hash = "sha256:8c3f48a6090b39edec1a273d2d57949aaf13723b01e02f9d518396887519f64d", size = 25921, upload-time = "2026-04-22T20:53:43.251Z" }, ] [[package]] @@ -778,20 +779,20 @@ filecache = [ [[package]] name = "cachetools" -version = "7.0.5" +version = "7.0.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/af/dd/57fe3fdb6e65b25a5987fd2cdc7e22db0aef508b91634d2e57d22928d41b/cachetools-7.0.5.tar.gz", hash = "sha256:0cd042c24377200c1dcd225f8b7b12b0ca53cc2c961b43757e774ebe190fd990", size = 37367, upload-time = "2026-03-09T20:51:29.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/76/7b/1755ed2c6bfabd1d98b37ae73152f8dcf94aa40fee119d163c19ed484704/cachetools-7.0.6.tar.gz", hash = "sha256:e5d524d36d65703a87243a26ff08ad84f73352adbeafb1cde81e207b456aaf24", size = 37526, upload-time = "2026-04-20T19:02:23.289Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/f3/39cf3367b8107baa44f861dc802cbf16263c945b62d8265d36034fc07bea/cachetools-7.0.5-py3-none-any.whl", hash = "sha256:46bc8ebefbe485407621d0a4264b23c080cedd913921bad7ac3ed2f26c183114", size = 13918, upload-time = "2026-03-09T20:51:27.33Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c4/cf76242a5da1410917107ff14551764aa405a5fd10cd10cf9a5ca8fa77f4/cachetools-7.0.6-py3-none-any.whl", hash = "sha256:4e94956cfdd3086f12042cdd29318f5ced3893014f7d0d059bf3ead3f85b7f8b", size = 13976, upload-time = "2026-04-20T19:02:21.187Z" }, ] [[package]] name = "certifi" -version = "2026.2.25" +version = "2026.4.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/ee/6caf7a40c36a1220410afe15a1cc64993a1f864871f698c0f93acb72842a/certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580", size = 137077, upload-time = "2026-04-22T11:26:11.191Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, + { url = "https://files.pythonhosted.org/packages/22/30/7cd8fdcdfbc5b869528b079bfb76dcdf6056b1a2097a662e5e8c04f42965/certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a", size = 135707, upload-time = "2026-04-22T11:26:09.372Z" }, ] [[package]] @@ -943,8 +944,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "bcrypt" }, { name = "build" }, - { name = "grpcio", version = "1.78.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "grpcio", version = "1.80.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, + { name = "grpcio" }, { name = "httpx" }, { name = "importlib-resources" }, { name = "jsonschema" }, @@ -1359,6 +1359,9 @@ qdrant-edge = [ tools = [ { name = "crewai-tools" }, ] +valkey = [ + { name = "valkey-glide" }, +] voyageai = [ { name = "voyageai" }, ] @@ -1420,9 +1423,10 @@ requires-dist = [ { name = "tomli", specifier = "~=2.0.2" }, { name = "tomli-w", specifier = "~=1.1.0" }, { name = "uv", specifier = "~=0.11.6" }, + { name = "valkey-glide", marker = "extra == 'valkey'", specifier = ">=1.3.0" }, { name = "voyageai", marker = "extra == 'voyageai'", specifier = "~=0.3.5" }, ] -provides-extras = ["a2a", "anthropic", "aws", "azure-ai-inference", "bedrock", "docling", "embeddings", "file-processing", "google-genai", "litellm", "mem0", "openpyxl", "pandas", "qdrant", "qdrant-edge", "tools", "voyageai", "watson"] 
+provides-extras = ["a2a", "anthropic", "aws", "azure-ai-inference", "bedrock", "docling", "embeddings", "file-processing", "google-genai", "litellm", "mem0", "openpyxl", "pandas", "qdrant", "qdrant-edge", "tools", "valkey", "voyageai", "watson"] [[package]] name = "crewai-devtools" @@ -1567,7 +1571,8 @@ rag = [ { name = "python-docx" }, ] scrapegraph-py = [ - { name = "scrapegraph-py" }, + { name = "scrapegraph-py", version = "1.47.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, + { name = "scrapegraph-py", version = "2.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, ] scrapfly-sdk = [ { name = "scrapfly-sdk" }, @@ -1600,8 +1605,7 @@ tavily-python = [ { name = "tavily-python" }, ] weaviate-client = [ - { name = "weaviate-client", version = "4.16.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, - { name = "weaviate-client", version = "4.18.3", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, + { name = "weaviate-client" }, ] xml = [ { name = "unstructured", extra = ["all-docs", "local-inference"] }, @@ -1808,16 +1812,16 @@ wheels = [ [[package]] name = "databricks-sdk" -version = "0.102.0" +version = "0.103.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-auth" }, { name = "protobuf" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/b3/41ff1c3afe092df9085e084e0dc81c45bca5ed65f7b60dc59df0ade43c76/databricks_sdk-0.102.0.tar.gz", hash = "sha256:8fa5f82317ee27cc46323c6e2543d2cfefb4468653f92ba558271043c6f72fb9", size = 887450, upload-time = "2026-03-19T08:15:54.428Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/4d/64/57f2b01460923094eba9f14c685f261e3574fe4ba1a4ab317662ca010990/databricks_sdk-0.103.0.tar.gz", hash = "sha256:bdc93a2382e5717edd39c2faa92e38606ccc48aead047fe2154243509861eb1a", size = 909288, upload-time = "2026-04-20T07:30:05.284Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/8c/d082bd5f72d7613524d5b35dfe1f71732b2246be2704fad68cd0e3fdd020/databricks_sdk-0.102.0-py3-none-any.whl", hash = "sha256:75d1253276ee8f3dd5e7b00d62594b7051838435e618f74a8570a6dbd723ec12", size = 838533, upload-time = "2026-03-19T08:15:52.248Z" }, + { url = "https://files.pythonhosted.org/packages/6d/a7/9e2c8c960411da289c70b59e71d444c180633f940711a87f8795d3d4e3f6/databricks_sdk-0.103.0-py3-none-any.whl", hash = "sha256:eb6c1cdbe8dfe76590d049cbd03e35c45855d1bbc968d565183fa27b80ac3a76", size = 857469, upload-time = "2026-04-20T07:30:03.656Z" }, ] [[package]] @@ -2095,7 +2099,7 @@ chunking = [ [[package]] name = "docling-ibm-models" -version = "3.13.0" +version = "3.13.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "accelerate" }, @@ -2113,14 +2117,14 @@ dependencies = [ { name = "tqdm" }, { name = "transformers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/87/01bf0c710af37328aa3517b34e64c2a2f3a6283a1cfc8859ae05881dd769/docling_ibm_models-3.13.0.tar.gz", hash = "sha256:f402effae8a63b0e5c3b5ce13120601baa2cd8098beef1d53ab5a056443758d3", size = 98538, upload-time = "2026-03-27T15:49:57.569Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/c1/e589d1b67493c92f25451da5692e4ea3134cc20f44d5fb45f0e113c8535d/docling_ibm_models-3.13.1.tar.gz", hash = "sha256:f647ed00db33e4e522a2421aba2eb429d0d951900e8826aeac857fd0b09cec4e", size = 98660, upload-time = "2026-04-22T15:52:06.465Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/52/11a8c8fff80e1fa581173edcc91cc92ed24184519e746fe39456f617653d/docling_ibm_models-3.13.0-py3-none-any.whl", hash = 
"sha256:a11acc6034b06e0bed8dc0ca1fa700615b8246eacce411619168e1f6562b0d0d", size = 93855, upload-time = "2026-03-27T15:49:56.353Z" }, + { url = "https://files.pythonhosted.org/packages/7a/45/3207e4cc44fe6bd31541f6324a9da6e9057994deef3ca65058bfa6bacbb0/docling_ibm_models-3.13.1-py3-none-any.whl", hash = "sha256:7951e713a0dd48bc408d2bde97be983b2c6e0e9b58eb49381a279448aa68dd69", size = 94003, upload-time = "2026-04-22T15:52:04.864Z" }, ] [[package]] name = "docling-parse" -version = "5.9.0" +version = "5.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docling-core" }, @@ -2129,24 +2133,24 @@ dependencies = [ { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "tabulate" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f9/10/69dc586f0ef54cc4e21e50debcb6bc52a77571482c88b7664aa725a7f150/docling_parse-5.9.0.tar.gz", hash = "sha256:c6812a143225490096cc2491a200b8731670c1dadff9aaf928c481bd5feba410", size = 66685491, upload-time = "2026-04-15T14:53:45.021Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/0c/48a9bf8f935903e3cb7e69f25026b0c376b1788097cce2c94b5b496ec26c/docling_parse-5.10.0.tar.gz", hash = "sha256:5f953f673893f801f742558c0d6329d903fa4bbf4e60415c757dcb36dcba90dc", size = 6651220, upload-time = "2026-04-22T09:01:36.991Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/a0/f04284a3e620d93d496ecfcf3e88bff46661c1bf0b2e90fe8c515ca6b6a4/docling_parse-5.9.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:e7794b173e4d9ae0ea061106aedc98093951394efc7305c7adffe4c43918369a", size = 8618285, upload-time = "2026-04-15T14:52:44.849Z" }, - { url = "https://files.pythonhosted.org/packages/bf/49/ed3b83457b4aef027ceff9d24348fb4397101497721d9449da8292eeb246/docling_parse-5.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21d1b0fdcb6965d3b1c1a224d87ce6cddc3c52649125ddec951d6b99dcda57da", size = 9335733, upload-time = "2026-04-15T14:52:47.188Z" }, - { url 
= "https://files.pythonhosted.org/packages/7c/45/cf9bfd6515d8e34181befa9a7567680fee7e109be5902138e665b3021179/docling_parse-5.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690f10074ec05c69fb76050c282965ed9072c16f8eb020bc2483e228f0dfe39e", size = 9578860, upload-time = "2026-04-15T14:52:49.939Z" }, - { url = "https://files.pythonhosted.org/packages/9a/94/873be136532196e7224c94810826c9517ae6b0065c620c288799c4f9d48b/docling_parse-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:7b54b2272af1a4b6812f30d3b77c7774b021f34b65f2ee7032c561da2cc2c0a8", size = 10385131, upload-time = "2026-04-15T14:52:52.732Z" }, - { url = "https://files.pythonhosted.org/packages/f4/6c/3d6a840a208835b18235dc39a55a49ffbe36b739dffcd23edb43d56f977e/docling_parse-5.9.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:5880485aaf7d16cb398c67fcb804abc52f3797364338354fcc13240dac0e829e", size = 8619332, upload-time = "2026-04-15T14:52:56.362Z" }, - { url = "https://files.pythonhosted.org/packages/a6/91/eb49ee414b97190303047abd888478fe9596ae9af7c631668bca37ce0b93/docling_parse-5.9.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:322152aa19c74547a145b1563c6a1d3a1773ad39fcf4c0a7554ef333701101de", size = 9294677, upload-time = "2026-04-15T14:52:59.318Z" }, - { url = "https://files.pythonhosted.org/packages/4e/ba/8954e384e3e94b745279d5c213b5096a8bedce92ea69acea3377110835a6/docling_parse-5.9.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:afd7cd326ebe5de545e327f45b14be3e9b683efee0714d1b784f1314b1e22275", size = 9632461, upload-time = "2026-04-15T14:53:01.888Z" }, - { url = "https://files.pythonhosted.org/packages/9e/44/a786427fb8f77578639da41937f51284cff0b756d1507eeae5aee34c60ca/docling_parse-5.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:17dea2d9e467feb5b7fe53c58ed7493fffb9482563e8f065d426c87fe1078beb", size = 10386431, upload-time = "2026-04-15T14:53:04.538Z" }, - { url = 
"https://files.pythonhosted.org/packages/a5/c2/c98e01230920c151c679e4526fd655a8f10fe0ce9e34a4d49b3f456ee200/docling_parse-5.9.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f9bb08e9e26cdd30d102d1a81420aca4a4b4136af2070d179147529ed991a64f", size = 8620298, upload-time = "2026-04-15T14:53:07.311Z" }, - { url = "https://files.pythonhosted.org/packages/84/54/fc38b47d77d2ef97fdfb9a67e92daecaa68e29b3c54d6409f725b5901686/docling_parse-5.9.0-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e141b536ccd954b612f2d7a091bf31e4684af07866ad6fa8b92b83fd60972e4", size = 9295434, upload-time = "2026-04-15T14:53:10.189Z" }, - { url = "https://files.pythonhosted.org/packages/20/68/f5ba9c8bb743e65b79448089bf27d73189aca9ba781bd97d8712ff51595e/docling_parse-5.9.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:27eb3358564998f5f85264b093efc6e09d967113211448438911c646baa8c9b8", size = 9633448, upload-time = "2026-04-15T14:53:12.767Z" }, - { url = "https://files.pythonhosted.org/packages/5e/22/986312f5d7ec860e83fed6b3a604a736700510cb04e0fd8b8ab52a3bfedc/docling_parse-5.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fcbea80304e7a1549e8cf049c0b3ff8b27e8d99150fc86e65fa1839506c7c002", size = 10388840, upload-time = "2026-04-15T14:53:15.495Z" }, - { url = "https://files.pythonhosted.org/packages/41/28/7284bc189214e5c2a9ed15d0849a51f44d40dd9df9238d03c6db664bfc9e/docling_parse-5.9.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:0ff97842fd48bcc0ffae3dc8dfd1c96cca45b024395bdabea1ff2706bd23b44e", size = 8620340, upload-time = "2026-04-15T14:53:17.994Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5a/5716684a43e6ff0199be57f3b2177b36c2f69449d63a1a5b4db5b5419800/docling_parse-5.9.0-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:292f54cceba3847d94a34c9110deb932df475185e0773a0297c17d646a0ec641", size = 9296689, upload-time = "2026-04-15T14:53:20.926Z" }, - { url = 
"https://files.pythonhosted.org/packages/91/36/0a7001fa865a7023b3b26b97eb16a0ad0dfa472836e4042a8053be39ce37/docling_parse-5.9.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3ae90c0444034b1252881c99cec3a02779108df71ccf5a8eafaec7d4c5b4a8e0", size = 9633550, upload-time = "2026-04-15T14:53:23.831Z" }, - { url = "https://files.pythonhosted.org/packages/4e/ae/7880fd8b64b59f5d132426ec2cbe4db7595494254dbb3ffb5b9517ddb768/docling_parse-5.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:25a65bf93b826f733c3169623df720933294a89357c3dfef335e454b57507804", size = 10388600, upload-time = "2026-04-15T14:53:26.711Z" }, + { url = "https://files.pythonhosted.org/packages/ff/1d/086597a6cf953150e27bc1c6f32ea01d9734af28db6efc5feaa892373535/docling_parse-5.10.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0a7726170b9dfcaed04902352780272a1f6283248279c09601edb3c7c3ab0f67", size = 9110442, upload-time = "2026-04-22T09:00:55.955Z" }, + { url = "https://files.pythonhosted.org/packages/47/9b/bbb99211aeac737f07ee62943db2f205bec3855dd84408856621b4b9dff6/docling_parse-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7aac9696359984ad18d5a7abaed2a8f1a297cb738ff960104ae999239571c4", size = 9833510, upload-time = "2026-04-22T09:00:58.141Z" }, + { url = "https://files.pythonhosted.org/packages/31/54/f45648728add59a387ba989fc7562d2327f9de59491a31b9e4188be9b78a/docling_parse-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8409a754a9a96f017316e57297536142d05da792f33e9f5b2ba93330a8d3528e", size = 10106128, upload-time = "2026-04-22T09:01:00.928Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c3/cda016c1cfab766751c346340e2d315bb208775cd051d2ffaeeef672ca75/docling_parse-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:162a6aeb9cf497546aa520faad22b42939ec516cdaee0cdbd6421d2e89aaf38d", size = 10909729, upload-time = "2026-04-22T09:01:02.961Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/7a/a60dde1f6b1f1c9679d8c327931f298b6757f053d58d1b34af57f40919b7/docling_parse-5.10.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:64619b98472c6a0c609de9a7095a7a8fc5970e92758f523264bc827946e74ed8", size = 9111152, upload-time = "2026-04-22T09:01:05.332Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d2/a775e15c2dae0996633cbfc1cfa03f92c06d9b5b0f7b688f2fb4993cd2d5/docling_parse-5.10.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4f166e227e8218f410c9ef976902fd3f29b6655705f288f1ba051582788e2c5", size = 9780057, upload-time = "2026-04-22T09:01:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/2e/90/ecab13d6123b957bc15de353fe2e3d9ade99d01fae8c8fdd03204f1b7d8b/docling_parse-5.10.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5983907a5b36a7242150537f8565958fe095829c5ab33ac72f368d1b97b21c3", size = 10158565, upload-time = "2026-04-22T09:01:09.266Z" }, + { url = "https://files.pythonhosted.org/packages/8e/49/56ca316fb35606720452e72f3e1c83f05974f0d56521c2512ffd1987bea2/docling_parse-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:966eebb69a4b73f20af461c5ee261097fd2dd923ffe18cd360ab999b23619130", size = 10910103, upload-time = "2026-04-22T09:01:11.641Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4f/4d8fdbece925b978070e6eb23f8c0ae24f8e9e0cd4a849813128a03297f2/docling_parse-5.10.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:10f2c564552c2a0d1ccbf38ea250bc1608abfb88a5b907ac49f3331157a4b77b", size = 9112861, upload-time = "2026-04-22T09:01:14.017Z" }, + { url = "https://files.pythonhosted.org/packages/aa/fc/caf8ee42d0f15f6369a528b76b31377527d89e69ca9512657c665b63b464/docling_parse-5.10.0-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8f886566264793a084046b6d3ee75156510bfbee14360f700c5ddf38f113bad", size = 9780836, upload-time = "2026-04-22T09:01:15.77Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/de/bb6df3de415a639c2d84650924b488748404b39e0ed244b9a8c6dbc1a9b6/docling_parse-5.10.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6fd6668f4a0c27916ddc1ffada1dd483d34a2033bdb0794e81b1ebf4f5766f32", size = 10158920, upload-time = "2026-04-22T09:01:17.554Z" }, + { url = "https://files.pythonhosted.org/packages/db/4c/19a87434865c4c9dcb65f6b1e39f76c0b0acd3ecb6a420e4f5233ac89a2d/docling_parse-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:c6ae34923f3d084ac3ff47cf2edd998439299c7bd4f1e2a8d2245d5e0697b0bf", size = 10912106, upload-time = "2026-04-22T09:01:19.704Z" }, + { url = "https://files.pythonhosted.org/packages/70/5c/a9e822d319beaef6ecfd915e5e44d4ce92bf70ec9b9fb2ebaac573e5cd7e/docling_parse-5.10.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f7221d8f5567d135818b78683625cef72e2a32833a446bcc9c83409792163522", size = 9112853, upload-time = "2026-04-22T09:01:21.921Z" }, + { url = "https://files.pythonhosted.org/packages/89/83/ec0d68f045c5ab1de4aa9ee7c8fa3c7c5b1b112dafbd6aef16e30fbbf126/docling_parse-5.10.0-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9209826a4c5bbfbebd479aa237ac6f43973fc75a68b8b7e5731b1720b248b92", size = 9781281, upload-time = "2026-04-22T09:01:24.067Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/c5f4de11adcd82ce93c21df4fb297f6ac93bf0953bd1f3b9796b97b81cd4/docling_parse-5.10.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c4a3bc04e954c7d271f54126d5a42f015ceac7a9fd22943c21751d172253d3f9", size = 10158786, upload-time = "2026-04-22T09:01:25.816Z" }, + { url = "https://files.pythonhosted.org/packages/ad/ca/ecc4736915b33a086687d0713ba34d98bd8c60172f70099c3d5ba1661a16/docling_parse-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:83edd224024e9f891e541aa0785103d6ae9f545d398d4053e37f8628444cf6b5", size = 10911860, upload-time = "2026-04-22T09:01:27.793Z" }, ] [[package]] @@ -2261,7 +2265,7 @@ 
wheels = [ [[package]] name = "exa-py" -version = "2.12.0" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpcore" }, @@ -2272,9 +2276,9 @@ dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/d2/22f8e5b83fb7ff1a5b19528b21bb908504c8b6a716309b169801881e64ff/exa_py-2.12.0.tar.gz", hash = "sha256:2cd5fe2d47d8e0221f87dcb2be0f007cc0a1f0a643b16dfc586ab1421998f4fc", size = 58731, upload-time = "2026-04-15T12:55:17.616Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/e4/11bbbc076ae420b9e00537945d48a03cb42cc6da63edc65bf50d23e4778e/exa_py-2.12.1.tar.gz", hash = "sha256:9ff1924fbfbcae822b20c0ddef0650fabc04ac75906b9153623eadc18135b7ce", size = 55792, upload-time = "2026-04-22T20:00:38.528Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/87/e5c458741a34c945d6b612ec54f00088a6869ffc4f3f8a7b06ae080ec6af/exa_py-2.12.0-py3-none-any.whl", hash = "sha256:78b954ca99151228e4b853bd25e58829048a9a601d6187001befa512e0143f8f", size = 73896, upload-time = "2026-04-15T12:55:16.03Z" }, + { url = "https://files.pythonhosted.org/packages/49/19/0a504b6ce7c468595cd0551f65e5c464832a1d3af8dc8acd681e21696a5f/exa_py-2.12.1-py3-none-any.whl", hash = "sha256:9e735802161482a7d5b231376257883cb4e34dbd6f75ded04ab1a5a171b69d9f", size = 74512, upload-time = "2026-04-22T20:00:34.326Z" }, ] [[package]] @@ -2300,14 +2304,14 @@ wheels = [ [[package]] name = "faker" -version = "40.13.0" +version = "40.15.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tzdata", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/89/95/4822ffe94723553789aef783104f4f18fc20d7c4c68e1bbd633e11d09758/faker-40.13.0.tar.gz", hash = "sha256:a0751c84c3abac17327d7bb4c98e8afe70ebf7821e01dd7d0b15cd8856415525", size = 1962043, upload-time = "2026-04-06T16:44:55.68Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/7f/13/6741787bd91c4109c7bed047d68273965cd52ce8a5f773c471b949334b6d/faker-40.15.0.tar.gz", hash = "sha256:20f3a6ec8c266b74d4c554e34118b21c3c2056c0b4a519d15c8decb3a4e6e795", size = 1967447, upload-time = "2026-04-17T20:05:27.555Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/da/8a/708103325edff16a0b0e004de0d37db8ba216a32713948c64d71f6d4a4c2/faker-40.13.0-py3-none-any.whl", hash = "sha256:c1298fd0d819b3688fb5fd358c4ba8f56c7c8c740b411fd3dbd8e30bf2c05019", size = 1994597, upload-time = "2026-04-06T16:44:53.698Z" }, + { url = "https://files.pythonhosted.org/packages/a7/a7/a600f8f30d4505e89166de51dd121bd540ab8e560e8cf0901de00a81de8c/faker-40.15.0-py3-none-any.whl", hash = "sha256:71ab3c3370da9d2205ab74ffb0fd51273063ad562b3a3bb69d0026a20923e318", size = 2004447, upload-time = "2026-04-17T20:05:25.437Z" }, ] [[package]] @@ -2414,11 +2418,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.28.0" +version = "3.29.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/17/6e8890271880903e3538660a21d63a6c1fea969ac71d0d6b608b78727fa9/filelock-3.28.0.tar.gz", hash = "sha256:4ed1010aae813c4ee8d9c660e4792475ee60c4a0ba76073ceaf862bd317e3ca6", size = 56474, upload-time = "2026-04-14T22:54:33.625Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/fe/997687a931ab51049acce6fa1f23e8f01216374ea81374ddee763c493db5/filelock-3.29.0.tar.gz", hash = "sha256:69974355e960702e789734cb4871f884ea6fe50bd8404051a3530bc07809cf90", size = 57571, upload-time = "2026-04-19T15:39:10.068Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/21/2f728888c45033d34a417bfcd248ea2564c9e08ab1bfd301377cf05d5586/filelock-3.28.0-py3-none-any.whl", hash = "sha256:de9af6712788e7171df1b28b15eba2446c69721433fa427a9bee07b17820a9db", size = 39189, upload-time = "2026-04-14T22:54:32.037Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/47/dd9a212ef6e343a6857485ffe25bba537304f1913bdbed446a23f7f592e1/filelock-3.29.0-py3-none-any.whl", hash = "sha256:96f5f6344709aa1572bbf631c640e4ebeeb519e08da902c39a001882f30ac258", size = 39812, upload-time = "2026-04-19T15:39:08.752Z" }, ] [[package]] @@ -2432,7 +2436,7 @@ wheels = [ [[package]] name = "firecrawl-py" -version = "4.22.2" +version = "4.23.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -2443,9 +2447,9 @@ dependencies = [ { name = "requests" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9d/03/fc714c52f156add4c58665ff3ede3ff2b07d96e32742507ed94769a94227/firecrawl_py-4.22.2.tar.gz", hash = "sha256:c1bf17f6faf3b9599291e56d4b1b1d367777dbcf35b28568dd07084f1b0c9149", size = 174536, upload-time = "2026-04-15T21:34:42.124Z" } +sdist = { url = "https://files.pythonhosted.org/packages/11/a3/5088759334803f2efa1eaa0267d93804a71d934f3185ee125aee7f72f084/firecrawl_py-4.23.0.tar.gz", hash = "sha256:7c65a74e0d328a3cf4af1cd476af2ef34090326225fab65d3fe05a2d32d2b11b", size = 179393, upload-time = "2026-04-22T21:37:54.232Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/35/adc7ff46b0f06261ce70b43ab0861c895d12bde7a7ceea95e45d45cb0a82/firecrawl_py-4.22.2-py3-none-any.whl", hash = "sha256:9f13f55ec7e8eb61a7fe91a2af09d5dd5c7539ec3f64f66280a7ceaa8b1bad10", size = 217823, upload-time = "2026-04-15T21:34:40.496Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/9ceb86a012dd15c4a1eb176da239b3772bf34ced598d5ca176e2c53acfc0/firecrawl_py-4.23.0-py3-none-any.whl", hash = "sha256:1029f837d1485edf1006485ab3dd94a6a6f5225e4ffef1df2d3e9cdc5c4bd296", size = 224952, upload-time = "2026-04-22T21:37:52.082Z" }, ] [[package]] @@ -2618,14 +2622,14 @@ wheels = [ [[package]] name = "gitpython" -version = "3.1.46" +version = "3.1.47" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "gitdb" }, ] -sdist = { 
url = "https://files.pythonhosted.org/packages/df/b5/59d16470a1f0dfe8c793f9ef56fd3826093fc52b3bd96d6b9d6c26c7e27b/gitpython-3.1.46.tar.gz", hash = "sha256:400124c7d0ef4ea03f7310ac2fbf7151e09ff97f2a3288d64a440c584a29c37f", size = 215371, upload-time = "2026-01-01T15:37:32.073Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c1/bd/50db468e9b1310529a19fce651b3b0e753b5c07954d486cba31bbee9a5d5/gitpython-3.1.47.tar.gz", hash = "sha256:dba27f922bd2b42cb54c87a8ab3cb6beb6bf07f3d564e21ac848913a05a8a3cd", size = 216978, upload-time = "2026-04-22T02:44:44.059Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/09/e21df6aef1e1ffc0c816f0522ddc3f6dcded766c3261813131c78a704470/gitpython-3.1.46-py3-none-any.whl", hash = "sha256:79812ed143d9d25b6d176a10bb511de0f9c67b1fa641d82097b0ab90398a2058", size = 208620, upload-time = "2026-01-01T15:37:30.574Z" }, + { url = "https://files.pythonhosted.org/packages/f2/c5/a1bc0996af85757903cf2bf444a7824e68e0035ce63fb41d6f76f9def68b/gitpython-3.1.47-py3-none-any.whl", hash = "sha256:489f590edfd6d20571b2c0e72c6a6ac6915ee8b8cd04572330e3842207a78905", size = 209547, upload-time = "2026-04-22T02:44:41.271Z" }, ] [[package]] @@ -2646,8 +2650,7 @@ wheels = [ [package.optional-dependencies] grpc = [ - { name = "grpcio", version = "1.78.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "grpcio", version = "1.80.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, + { name = "grpcio" }, { name = "grpcio-status" }, ] @@ -2676,8 +2679,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", extra = ["grpc"] }, { name = "google-auth" }, - { name = "grpcio", version = "1.78.0", source = { registry = 
"https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "grpcio", version = "1.80.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, + { name = "grpcio" }, { name = "proto-plus" }, { name = "protobuf" }, ] @@ -2770,15 +2772,8 @@ wheels = [ name = "grpcio" version = "1.78.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13' and platform_machine == 's390x'", - "python_full_version == '3.12.*' and platform_machine != 's390x'", - "python_full_version == '3.12.*' and platform_machine == 's390x'", - "python_full_version == '3.11.*' and platform_machine != 's390x'", - "python_full_version == '3.11.*' and platform_machine == 's390x'", -] dependencies = [ - { name = "typing-extensions", marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, + { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/06/8a/3d098f35c143a89520e568e6539cc098fcd294495910e359889ce8741c84/grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5", size = 12852416, upload-time = "2026-02-06T09:57:18.093Z" } wheels = [ @@ -2824,83 +2819,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/27/d86b89e36de8a951501fb06a0f38df19853210f341d0b28f83f4aa0ffa08/grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4", size = 4797393, upload-time = "2026-02-06T09:56:17.882Z" }, ] -[[package]] -name = "grpcio" -version = "1.80.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13' and platform_machine != 's390x'", 
- "python_full_version < '3.11' and platform_machine != 's390x'", - "python_full_version < '3.11' and platform_machine == 's390x'", -] -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b7/48/af6173dbca4454f4637a4678b67f52ca7e0c1ed7d5894d89d434fecede05/grpcio-1.80.0.tar.gz", hash = "sha256:29aca15edd0688c22ba01d7cc01cb000d72b2033f4a3c72a81a19b56fd143257", size = 12978905, upload-time = "2026-03-30T08:49:10.502Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/cd/bb7b7e54084a344c03d68144450da7ddd5564e51a298ae1662de65f48e2d/grpcio-1.80.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:886457a7768e408cdce226ad1ca67d2958917d306523a0e21e1a2fdaa75c9c9c", size = 6050363, upload-time = "2026-03-30T08:46:20.894Z" }, - { url = "https://files.pythonhosted.org/packages/16/02/1417f5c3460dea65f7a2e3c14e8b31e77f7ffb730e9bfadd89eda7a9f477/grpcio-1.80.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:7b641fc3f1dc647bfd80bd713addc68f6d145956f64677e56d9ebafc0bd72388", size = 12026037, upload-time = "2026-03-30T08:46:25.144Z" }, - { url = "https://files.pythonhosted.org/packages/43/98/c910254eedf2cae368d78336a2de0678e66a7317d27c02522392f949b5c6/grpcio-1.80.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:33eb763f18f006dc7fee1e69831d38d23f5eccd15b2e0f92a13ee1d9242e5e02", size = 6602306, upload-time = "2026-03-30T08:46:27.593Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f8/88ca4e78c077b2b2113d95da1e1ab43efd43d723c9a0397d26529c2c1a56/grpcio-1.80.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:52d143637e3872633fc7dd7c3c6a1c84e396b359f3a72e215f8bf69fd82084fc", size = 7301535, upload-time = "2026-03-30T08:46:29.556Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/96/f28660fe2fe0f153288bf4a04e4910b7309d442395135c88ed4f5b3b8b40/grpcio-1.80.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c51bf8ac4575af2e0678bccfb07e47321fc7acb5049b4482832c5c195e04e13a", size = 6808669, upload-time = "2026-03-30T08:46:31.984Z" }, - { url = "https://files.pythonhosted.org/packages/47/eb/3f68a5e955779c00aeef23850e019c1c1d0e032d90633ba49c01ad5a96e0/grpcio-1.80.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:50a9871536d71c4fba24ee856abc03a87764570f0c457dd8db0b4018f379fed9", size = 7409489, upload-time = "2026-03-30T08:46:34.684Z" }, - { url = "https://files.pythonhosted.org/packages/5b/a7/d2f681a4bfb881be40659a309771f3bdfbfdb1190619442816c3f0ffc079/grpcio-1.80.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a72d84ad0514db063e21887fbacd1fd7acb4d494a564cae22227cd45c7fbf199", size = 8423167, upload-time = "2026-03-30T08:46:36.833Z" }, - { url = "https://files.pythonhosted.org/packages/97/8a/29b4589c204959aa35ce5708400a05bba72181807c45c47b3ec000c39333/grpcio-1.80.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f7691a6788ad9196872f95716df5bc643ebba13c97140b7a5ee5c8e75d1dea81", size = 7846761, upload-time = "2026-03-30T08:46:40.091Z" }, - { url = "https://files.pythonhosted.org/packages/6b/d2/ed143e097230ee121ac5848f6ff14372dba91289b10b536d54fb1b7cbae7/grpcio-1.80.0-cp310-cp310-win32.whl", hash = "sha256:46c2390b59d67f84e882694d489f5b45707c657832d7934859ceb8c33f467069", size = 4156534, upload-time = "2026-03-30T08:46:42.026Z" }, - { url = "https://files.pythonhosted.org/packages/d5/c9/df8279bb49b29409995e95efa85b72973d62f8aeff89abee58c91f393710/grpcio-1.80.0-cp310-cp310-win_amd64.whl", hash = "sha256:dc053420fc75749c961e2a4c906398d7c15725d36ccc04ae6d16093167223b58", size = 4889869, upload-time = "2026-03-30T08:46:44.219Z" }, - { url = 
"https://files.pythonhosted.org/packages/5d/db/1d56e5f5823257b291962d6c0ce106146c6447f405b60b234c4f222a7cde/grpcio-1.80.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:dfab85db094068ff42e2a3563f60ab3dddcc9d6488a35abf0132daec13209c8a", size = 6055009, upload-time = "2026-03-30T08:46:46.265Z" }, - { url = "https://files.pythonhosted.org/packages/6e/18/c83f3cad64c5ca63bca7e91e5e46b0d026afc5af9d0a9972472ceba294b3/grpcio-1.80.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:5c07e82e822e1161354e32da2662f741a4944ea955f9f580ec8fb409dd6f6060", size = 12035295, upload-time = "2026-03-30T08:46:49.099Z" }, - { url = "https://files.pythonhosted.org/packages/0f/8e/e14966b435be2dda99fbe89db9525ea436edc79780431a1c2875a3582644/grpcio-1.80.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba0915d51fd4ced2db5ff719f84e270afe0e2d4c45a7bdb1e8d036e4502928c2", size = 6610297, upload-time = "2026-03-30T08:46:52.123Z" }, - { url = "https://files.pythonhosted.org/packages/cc/26/d5eb38f42ce0e3fdc8174ea4d52036ef8d58cc4426cb800f2610f625dd75/grpcio-1.80.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3cb8130ba457d2aa09fa6b7c3ed6b6e4e6a2685fce63cb803d479576c4d80e21", size = 7300208, upload-time = "2026-03-30T08:46:54.859Z" }, - { url = "https://files.pythonhosted.org/packages/25/51/bd267c989f85a17a5b3eea65a6feb4ff672af41ca614e5a0279cc0ea381c/grpcio-1.80.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:09e5e478b3d14afd23f12e49e8b44c8684ac3c5f08561c43a5b9691c54d136ab", size = 6813442, upload-time = "2026-03-30T08:46:57.056Z" }, - { url = "https://files.pythonhosted.org/packages/9e/d9/d80eef735b19e9169e30164bbf889b46f9df9127598a83d174eb13a48b26/grpcio-1.80.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:00168469238b022500e486c1c33916acf2f2a9b2c022202cf8a1885d2e3073c1", size = 7414743, upload-time = "2026-03-30T08:46:59.682Z" }, - { url = 
"https://files.pythonhosted.org/packages/de/f2/567f5bd5054398ed6b0509b9a30900376dcf2786bd936812098808b49d8d/grpcio-1.80.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8502122a3cc1714038e39a0b071acb1207ca7844208d5ea0d091317555ee7106", size = 8426046, upload-time = "2026-03-30T08:47:02.474Z" }, - { url = "https://files.pythonhosted.org/packages/62/29/73ef0141b4732ff5eacd68430ff2512a65c004696997f70476a83e548e7e/grpcio-1.80.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ce1794f4ea6cc3ca29463f42d665c32ba1b964b48958a66497917fe9069f26e6", size = 7851641, upload-time = "2026-03-30T08:47:05.462Z" }, - { url = "https://files.pythonhosted.org/packages/46/69/abbfa360eb229a8623bab5f5a4f8105e445bd38ce81a89514ba55d281ad0/grpcio-1.80.0-cp311-cp311-win32.whl", hash = "sha256:51b4a7189b0bef2aa30adce3c78f09c83526cf3dddb24c6a96555e3b97340440", size = 4154368, upload-time = "2026-03-30T08:47:08.027Z" }, - { url = "https://files.pythonhosted.org/packages/6f/d4/ae92206d01183b08613e846076115f5ac5991bae358d2a749fa864da5699/grpcio-1.80.0-cp311-cp311-win_amd64.whl", hash = "sha256:02e64bb0bb2da14d947a49e6f120a75e947250aebe65f9629b62bb1f5c14e6e9", size = 4894235, upload-time = "2026-03-30T08:47:10.839Z" }, - { url = "https://files.pythonhosted.org/packages/5c/e8/a2b749265eb3415abc94f2e619bbd9e9707bebdda787e61c593004ec927a/grpcio-1.80.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:c624cc9f1008361014378c9d776de7182b11fe8b2e5a81bc69f23a295f2a1ad0", size = 6015616, upload-time = "2026-03-30T08:47:13.428Z" }, - { url = "https://files.pythonhosted.org/packages/3e/97/b1282161a15d699d1e90c360df18d19165a045ce1c343c7f313f5e8a0b77/grpcio-1.80.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:f49eddcac43c3bf350c0385366a58f36bed8cc2c0ec35ef7b74b49e56552c0c2", size = 12014204, upload-time = "2026-03-30T08:47:15.873Z" }, - { url = 
"https://files.pythonhosted.org/packages/6e/5e/d319c6e997b50c155ac5a8cb12f5173d5b42677510e886d250d50264949d/grpcio-1.80.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d334591df610ab94714048e0d5b4f3dd5ad1bee74dfec11eee344220077a79de", size = 6563866, upload-time = "2026-03-30T08:47:18.588Z" }, - { url = "https://files.pythonhosted.org/packages/ae/f6/fdd975a2cb4d78eb67769a7b3b3830970bfa2e919f1decf724ae4445f42c/grpcio-1.80.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0cb517eb1d0d0aaf1d87af7cc5b801d686557c1d88b2619f5e31fab3c2315921", size = 7273060, upload-time = "2026-03-30T08:47:21.113Z" }, - { url = "https://files.pythonhosted.org/packages/db/f0/a3deb5feba60d9538a962913e37bd2e69a195f1c3376a3dd44fe0427e996/grpcio-1.80.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4e78c4ac0d97dc2e569b2f4bcbbb447491167cb358d1a389fc4af71ab6f70411", size = 6782121, upload-time = "2026-03-30T08:47:23.827Z" }, - { url = "https://files.pythonhosted.org/packages/ca/84/36c6dcfddc093e108141f757c407902a05085e0c328007cb090d56646cdf/grpcio-1.80.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2ed770b4c06984f3b47eb0517b1c69ad0b84ef3f40128f51448433be904634cd", size = 7383811, upload-time = "2026-03-30T08:47:26.517Z" }, - { url = "https://files.pythonhosted.org/packages/7c/ef/f3a77e3dc5b471a0ec86c564c98d6adfa3510d38f8ee99010410858d591e/grpcio-1.80.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:256507e2f524092f1473071a05e65a5b10d84b82e3ff24c5b571513cfaa61e2f", size = 8393860, upload-time = "2026-03-30T08:47:29.439Z" }, - { url = "https://files.pythonhosted.org/packages/9b/8d/9d4d27ed7f33d109c50d6b5ce578a9914aa68edab75d65869a17e630a8d1/grpcio-1.80.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a6284a5d907c37db53350645567c522be314bac859a64a7a5ca63b77bb7958f", size = 7830132, upload-time = "2026-03-30T08:47:33.254Z" }, - { url = 
"https://files.pythonhosted.org/packages/14/e4/9990b41c6d7a44e1e9dee8ac11d7a9802ba1378b40d77468a7761d1ad288/grpcio-1.80.0-cp312-cp312-win32.whl", hash = "sha256:c71309cfce2f22be26aa4a847357c502db6c621f1a49825ae98aa0907595b193", size = 4140904, upload-time = "2026-03-30T08:47:35.319Z" }, - { url = "https://files.pythonhosted.org/packages/2f/2c/296f6138caca1f4b92a31ace4ae1b87dab692fc16a7a3417af3bb3c805bf/grpcio-1.80.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe648599c0e37594c4809d81a9e77bd138cc82eb8baa71b6a86af65426723ff", size = 4880944, upload-time = "2026-03-30T08:47:37.831Z" }, - { url = "https://files.pythonhosted.org/packages/2f/3a/7c3c25789e3f069e581dc342e03613c5b1cb012c4e8c7d9d5cf960a75856/grpcio-1.80.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e9e408fc016dffd20661f0126c53d8a31c2821b5c13c5d67a0f5ed5de93319ad", size = 6017243, upload-time = "2026-03-30T08:47:40.075Z" }, - { url = "https://files.pythonhosted.org/packages/04/19/21a9806eb8240e174fd1ab0cd5b9aa948bb0e05c2f2f55f9d5d7405e6d08/grpcio-1.80.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:92d787312e613754d4d8b9ca6d3297e69994a7912a32fa38c4c4e01c272974b0", size = 12010840, upload-time = "2026-03-30T08:47:43.11Z" }, - { url = "https://files.pythonhosted.org/packages/18/3a/23347d35f76f639e807fb7a36fad3068aed100996849a33809591f26eca6/grpcio-1.80.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac393b58aa16991a2f1144ec578084d544038c12242da3a215966b512904d0f", size = 6567644, upload-time = "2026-03-30T08:47:46.806Z" }, - { url = "https://files.pythonhosted.org/packages/ff/40/96e07ecb604a6a67ae6ab151e3e35b132875d98bc68ec65f3e5ab3e781d7/grpcio-1.80.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:68e5851ac4b9afe07e7f84483803ad167852570d65326b34d54ca560bfa53fb6", size = 7277830, upload-time = "2026-03-30T08:47:49.643Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/e2/da1506ecea1f34a5e365964644b35edef53803052b763ca214ba3870c856/grpcio-1.80.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:873ff5d17d68992ef6605330127425d2fc4e77e612fa3c3e0ed4e668685e3140", size = 6783216, upload-time = "2026-03-30T08:47:52.817Z" }, - { url = "https://files.pythonhosted.org/packages/44/83/3b20ff58d0c3b7f6caaa3af9a4174d4023701df40a3f39f7f1c8e7c48f9d/grpcio-1.80.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2bea16af2750fd0a899bf1abd9022244418b55d1f37da2202249ba4ba673838d", size = 7385866, upload-time = "2026-03-30T08:47:55.687Z" }, - { url = "https://files.pythonhosted.org/packages/47/45/55c507599c5520416de5eefecc927d6a0d7af55e91cfffb2e410607e5744/grpcio-1.80.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba0db34f7e1d803a878284cd70e4c63cb6ae2510ba51937bf8f45ba997cefcf7", size = 8391602, upload-time = "2026-03-30T08:47:58.303Z" }, - { url = "https://files.pythonhosted.org/packages/10/bb/dd06f4c24c01db9cf11341b547d0a016b2c90ed7dbbb086a5710df7dd1d7/grpcio-1.80.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8eb613f02d34721f1acf3626dfdb3545bd3c8505b0e52bf8b5710a28d02e8aa7", size = 7826752, upload-time = "2026-03-30T08:48:01.311Z" }, - { url = "https://files.pythonhosted.org/packages/f9/1e/9d67992ba23371fd63d4527096eb8c6b76d74d52b500df992a3343fd7251/grpcio-1.80.0-cp313-cp313-win32.whl", hash = "sha256:93b6f823810720912fd131f561f91f5fed0fda372b6b7028a2681b8194d5d294", size = 4142310, upload-time = "2026-03-30T08:48:04.594Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e6/283326a27da9e2c3038bc93eeea36fb118ce0b2d03922a9cda6688f53c5b/grpcio-1.80.0-cp313-cp313-win_amd64.whl", hash = "sha256:e172cf795a3ba5246d3529e4d34c53db70e888fa582a8ffebd2e6e48bc0cba50", size = 4882833, upload-time = "2026-03-30T08:48:07.363Z" }, -] - -[[package]] -name = "grpcio-health-checking" -version = "1.71.2" -source = { registry = "https://pypi.org/simple" } -dependencies 
= [ - { name = "grpcio", version = "1.80.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, - { name = "protobuf", marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/53/86/20994347ef36b7626fb74539f13128100dd8b7eaac67efc063264e6cdc80/grpcio_health_checking-1.71.2.tar.gz", hash = "sha256:1c21ece88c641932f432b573ef504b20603bdf030ad4e1ec35dd7fdb4ea02637", size = 16770, upload-time = "2025-06-28T04:24:08.768Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/74/7bc6ab96bf1083cab2684f9c3ae434caa638de3d5c5574e8435e2c146598/grpcio_health_checking-1.71.2-py3-none-any.whl", hash = "sha256:f91db41410d6bd18a7828c5b6ac2bebd77a63483263cbe42bf3c0c9b86cece33", size = 18918, upload-time = "2025-06-28T04:23:56.923Z" }, -] - [[package]] name = "grpcio-status" version = "1.71.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "googleapis-common-protos" }, - { name = "grpcio", version = "1.78.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "grpcio", version = "1.80.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, + { name = "grpcio" }, { name = "protobuf" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fd/d1/b6e9877fedae3add1afdeae1f89d1927d296da9cf977eca0eb08fb8a460e/grpcio_status-1.71.2.tar.gz", hash = "sha256:c7a97e176df71cdc2c179cd1847d7fc86cca5832ad12e9798d7fed6b7a1aab50", size = 13677, upload-time = "2025-06-28T04:24:05.426Z" } @@ -3100,7 +3025,7 @@ wheels = [ [[package]] name = "hyperbrowser" -version = "0.90.4" 
+version = "0.90.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -3108,9 +3033,9 @@ dependencies = [ { name = "pydantic" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/13/47/2709a71c27e3614147b8bd9df378474bf450da18fb4c16a03b25ebb641de/hyperbrowser-0.90.4.tar.gz", hash = "sha256:14272b7ad78b7a16ecdb0f992c830b3dc3099fcf99bf0c417e78b1f22f1cb946", size = 67090, upload-time = "2026-04-16T18:51:49.957Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/24/15422c8d5f51bb28a10e4a8ed14e3ec669ee6ff305482c8fc146f4e4ac13/hyperbrowser-0.90.5.tar.gz", hash = "sha256:55129c3124bc9908837b40a0f015e6250775885089a5f941e8fba849c748e1a8", size = 68889, upload-time = "2026-04-21T18:23:28.306Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/af/b781aa3ad78c85cb8fc10b13ef005ec1e75b691b1af4314e81e5a8318755/hyperbrowser-0.90.4-py3-none-any.whl", hash = "sha256:b0e19e67f80a32a59838ecd12427fd5f7a23279f3987f3d74da336b390af6f8b", size = 113577, upload-time = "2026-04-16T18:51:48.631Z" }, + { url = "https://files.pythonhosted.org/packages/69/96/edde54b8b50ca3b33befb926d01bcf73764fe98b823270646514084a5e7d/hyperbrowser-0.90.5-py3-none-any.whl", hash = "sha256:145909a6d8b19f5520a2d8e2ed9293c114412480a6b4d9a1c3c2fa5505ea5e9a", size = 115510, upload-time = "2026-04-21T18:23:26.756Z" }, ] [[package]] @@ -3177,20 +3102,20 @@ wheels = [ [[package]] name = "identify" -version = "2.6.18" +version = "2.6.19" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/46/c4/7fb4db12296cdb11893d61c92048fe617ee853f8523b9b296ac03b43757e/identify-2.6.18.tar.gz", hash = "sha256:873ac56a5e3fd63e7438a7ecbc4d91aca692eb3fefa4534db2b7913f3fc352fd", size = 99580, upload-time = "2026-03-15T18:39:50.319Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/63/51723b5f116cc04b061cb6f5a561790abf249d25931d515cd375e063e0f4/identify-2.6.19.tar.gz", 
hash = "sha256:6be5020c38fcb07da56c53733538a3081ea5aa70d36a156f83044bfbf9173842", size = 99567, upload-time = "2026-04-17T18:39:50.265Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/33/92ef41c6fad0233e41d3d84ba8e8ad18d1780f1e5d99b3c683e6d7f98b63/identify-2.6.18-py2.py3-none-any.whl", hash = "sha256:8db9d3c8ea9079db92cafb0ebf97abdc09d52e97f4dcf773a2e694048b7cd737", size = 99394, upload-time = "2026-03-15T18:39:48.915Z" }, + { url = "https://files.pythonhosted.org/packages/94/84/d9273cd09688070a6523c4aee4663a8538721b2b755c4962aafae0011e72/identify-2.6.19-py2.py3-none-any.whl", hash = "sha256:20e6a87f786f768c092a721ad107fc9df0eb89347be9396cadf3f4abbd1fb78a", size = 99397, upload-time = "2026-04-17T18:39:49.221Z" }, ] [[package]] name = "idna" -version = "3.11" +version = "3.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/cc/762dfb036166873f0059f3b7de4565e1b5bc3d6f28a414c13da27e442f99/idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242", size = 194210, upload-time = "2026-04-22T16:42:42.314Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, + { url = "https://files.pythonhosted.org/packages/5d/13/ad7d7ca3808a898b4612b6fe93cde56b53f3034dcde235acb1f0e1df24c6/idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3", size = 68629, upload-time = 
"2026-04-22T16:42:40.909Z" }, ] [[package]] @@ -3485,6 +3410,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7b/91/984aca2ec129e2757d1e4e3c81c3fcda9d0f85b74670a094cc443d9ee949/joblib-1.5.3-py3-none-any.whl", hash = "sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713", size = 309071, upload-time = "2025-12-15T08:41:44.973Z" }, ] +[[package]] +name = "joserfc" +version = "1.6.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/c6/de8fdbdfa75c8ca04fead38a82d573df8a82906e984c349d58665f459558/joserfc-1.6.4.tar.gz", hash = "sha256:34ce5f499bfcc5e9ad4cc75077f9278ab3227b71da9aaf28f9ab705f8a560d3c", size = 231866, upload-time = "2026-04-13T13:15:40.632Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/f7/210b27752e972edb36d239315b08d3eb6b14824cc4a590da2337d195260b/joserfc-1.6.4-py3-none-any.whl", hash = "sha256:3e4a22b509b41908989237a045e25c8308d5fd47ab96bdae2dd8057c6451003a", size = 70464, upload-time = "2026-04-13T13:15:39.259Z" }, +] + [[package]] name = "json-repair" version = "0.25.3" @@ -3688,19 +3625,19 @@ wheels = [ [[package]] name = "lance-namespace" -version = "0.6.1" +version = "0.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "lance-namespace-urllib3-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/28/9f/7906ba4117df8d965510285eaf07264a77de2fd283b9d44ec7fc63a4a57a/lance_namespace-0.6.1.tar.gz", hash = "sha256:f0deea442bd3f1056a8e2fed056ae2778e3356517ec2e680db049058b824d131", size = 10666, upload-time = "2026-03-17T17:55:44.977Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/24/3de040859ec51a778760c61a7d2285df6c4ceee67f7d36b510eafba80603/lance_namespace-0.7.0.tar.gz", hash = "sha256:ecfe1f1ed6abfb0e767ddf74196dd321eda920ded78fd6c5162b407b0efb2c6e", size = 10528, upload-time = "2026-04-21T23:43:46.364Z" } wheels = [ - { 
url = "https://files.pythonhosted.org/packages/d1/91/aee1c0a04d17f2810173bd304bd444eb78332045df1b0c1b07cebd01f530/lance_namespace-0.6.1-py3-none-any.whl", hash = "sha256:9699c9e3f12236e5e08ea979cc4e036a8e3c67ed2f37ae6f25c5353ab908e1be", size = 12498, upload-time = "2026-03-17T17:55:44.062Z" }, + { url = "https://files.pythonhosted.org/packages/b8/58/4ceee78927be6897c363f6a7bcbe41596ddb924420a1a4add55878f8f45c/lance_namespace-0.7.0-py3-none-any.whl", hash = "sha256:b9a84eb1b2077116b70b2df26d27704c0b85a47174ba41ee1ad8b67e2370a16d", size = 12353, upload-time = "2026-04-21T23:43:42.223Z" }, ] [[package]] name = "lance-namespace-urllib3-client" -version = "0.6.1" +version = "0.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, @@ -3708,9 +3645,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/a1/8706a2be25bd184acccc411e48f1a42a4cbf3b6556cba15b9fcf4c15cfcc/lance_namespace_urllib3_client-0.6.1.tar.gz", hash = "sha256:31fbd058ce1ea0bf49045cdeaa756360ece0bc61e9e10276f41af6d217debe87", size = 182567, upload-time = "2026-03-17T17:55:46.87Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/2f/2f77b5c32cf8c0133f1763ee87d8030c3369483efbe76b976384362ffe92/lance_namespace_urllib3_client-0.7.0.tar.gz", hash = "sha256:e1384d4d0c6b1de4b344bb709ea53bd9cef6f471b864b1ee8968aa89698a30c5", size = 182930, upload-time = "2026-04-21T23:43:43.388Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/c7/cb9580602dec25f0fdd6005c1c9ba1d4c8c0c3dc8d543107e5a9f248bba8/lance_namespace_urllib3_client-0.6.1-py3-none-any.whl", hash = "sha256:b9c103e1377ad46d2bd70eec894bfec0b1e2133dae0964d7e4de543c6e16293b", size = 317111, upload-time = "2026-03-17T17:55:45.546Z" }, + { url = "https://files.pythonhosted.org/packages/13/ce/70adb410015e73ff4419c7e47edc5e1c142b13f4ed9438f600beaeedbcfd/lance_namespace_urllib3_client-0.7.0-py3-none-any.whl", hash = 
"sha256:d0aa13b510abf4b0cd8bc5c602207b8c9afc99ef4de1f4ea023492ccb5ec07e9", size = 313715, upload-time = "2026-04-21T23:43:44.973Z" }, ] [[package]] @@ -3793,7 +3730,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2 [[package]] name = "langsmith" -version = "0.7.32" +version = "0.7.33" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -3806,9 +3743,9 @@ dependencies = [ { name = "xxhash" }, { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/b4/a0b4a501bee6b8a741ce29f8c48155b132118483cddc6f9247735ddb38fa/langsmith-0.7.32.tar.gz", hash = "sha256:b59b8e106d0e4c4842e158229296086e2aa7c561e3f602acda73d3ad0062e915", size = 1184518, upload-time = "2026-04-15T23:42:41.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/75/1ee27b3510bf5b1b569b9695c9466c256caab45885bd569c0c67720236ad/langsmith-0.7.33.tar.gz", hash = "sha256:fa2d81ad6e8374a81fda9291894f6fcae714e55fbf11a0b07578e3cd4b1ea384", size = 1186298, upload-time = "2026-04-20T16:17:54.583Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/bc/148f98ac7dad73ac5e1b1c985290079cfeeb9ba13d760a24f25002beb2c9/langsmith-0.7.32-py3-none-any.whl", hash = "sha256:e1fde928990c4c52f47dc5132708cec674355d9101723d564183e965f383bf5f", size = 378272, upload-time = "2026-04-15T23:42:39.905Z" }, + { url = "https://files.pythonhosted.org/packages/f4/76/53033db34ffccd25d62c32b23b9468f7228b455da6976e1c420ae31555c4/langsmith-0.7.33-py3-none-any.whl", hash = "sha256:5b535b991d52d3b664ebb8dc6f95afcf8d0acb42e062ac45a54a6a4820139f20", size = 378981, upload-time = "2026-04-20T16:17:52.503Z" }, ] [[package]] @@ -5454,8 +5391,7 @@ version = "1.34.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "googleapis-common-protos" }, - { name = "grpcio", version = "1.78.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and python_full_version < 
'3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "grpcio", version = "1.80.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, + { name = "grpcio" }, { name = "opentelemetry-api" }, { name = "opentelemetry-exporter-otlp-proto-common" }, { name = "opentelemetry-proto" }, @@ -5720,11 +5656,11 @@ wheels = [ [[package]] name = "pathspec" -version = "1.0.4" +version = "1.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/17/9c3094b822982b9f1ea666d8580ce59000f61f87c1663556fb72031ad9ec/pathspec-1.1.0.tar.gz", hash = "sha256:f5d7c555da02fd8dde3e4a2354b6aba817a89112fa8f333f7917a2a4834dd080", size = 133918, upload-time = "2026-04-23T01:46:22.298Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, + { url = "https://files.pythonhosted.org/packages/fa/c9/8eed0486f074e9f1ca7f8ce5ad663e65f12fdab344028d658fa1b03d35e0/pathspec-1.1.0-py3-none-any.whl", hash = "sha256:574b128f7456bd899045ccd142dd446af7e6cfd0072d63ad73fbc55fbb4aaa42", size = 56264, upload-time = "2026-04-23T01:46:20.606Z" }, ] [[package]] @@ -6257,54 +6193,54 @@ wheels = [ [[package]] name = "psycopg2-binary" -version = "2.9.11" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/f2/8e377d29c2ecf99f6062d35ea606b036e8800720eccfec5fe3dd672c2b24/psycopg2_binary-2.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6fe6b47d0b42ce1c9f1fa3e35bb365011ca22e39db37074458f27921dca40f2", size = 3756506, upload-time = "2025-10-10T11:10:30.144Z" }, - { url = "https://files.pythonhosted.org/packages/24/cc/dc143ea88e4ec9d386106cac05023b69668bd0be20794c613446eaefafe5/psycopg2_binary-2.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c0e4262e089516603a09474ee13eabf09cb65c332277e39af68f6233911087", size = 3863943, upload-time = "2025-10-10T11:10:34.586Z" }, - { url = "https://files.pythonhosted.org/packages/8c/df/16848771155e7c419c60afeb24950b8aaa3ab09c0a091ec3ccca26a574d0/psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c47676e5b485393f069b4d7a811267d3168ce46f988fa602658b8bb901e9e64d", size = 4410873, upload-time = "2025-10-10T11:10:38.951Z" }, - { url = "https://files.pythonhosted.org/packages/43/79/5ef5f32621abd5a541b89b04231fe959a9b327c874a1d41156041c75494b/psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a28d8c01a7b27a1e3265b11250ba7557e5f72b5ee9e5f3a2fa8d2949c29bf5d2", size = 4468016, upload-time = "2025-10-10T11:10:43.319Z" }, - { url = "https://files.pythonhosted.org/packages/f0/9b/d7542d0f7ad78f57385971f426704776d7b310f5219ed58da5d605b1892e/psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f3f2732cf504a1aa9e9609d02f79bea1067d99edf844ab92c247bbca143303b", size = 4164996, upload-time = "2025-10-10T11:10:46.705Z" }, - { url = 
"https://files.pythonhosted.org/packages/14/ed/e409388b537fa7414330687936917c522f6a77a13474e4238219fcfd9a84/psycopg2_binary-2.9.11-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:865f9945ed1b3950d968ec4690ce68c55019d79e4497366d36e090327ce7db14", size = 3981881, upload-time = "2025-10-30T02:54:57.182Z" }, - { url = "https://files.pythonhosted.org/packages/bf/30/50e330e63bb05efc6fa7c1447df3e08954894025ca3dcb396ecc6739bc26/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91537a8df2bde69b1c1db01d6d944c831ca793952e4f57892600e96cee95f2cd", size = 3650857, upload-time = "2025-10-10T11:10:50.112Z" }, - { url = "https://files.pythonhosted.org/packages/f0/e0/4026e4c12bb49dd028756c5b0bc4c572319f2d8f1c9008e0dad8cc9addd7/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4dca1f356a67ecb68c81a7bc7809f1569ad9e152ce7fd02c2f2036862ca9f66b", size = 3296063, upload-time = "2025-10-10T11:10:54.089Z" }, - { url = "https://files.pythonhosted.org/packages/2c/34/eb172be293c886fef5299fe5c3fcf180a05478be89856067881007934a7c/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0da4de5c1ac69d94ed4364b6cbe7190c1a70d325f112ba783d83f8440285f152", size = 3043464, upload-time = "2025-10-30T02:55:02.483Z" }, - { url = "https://files.pythonhosted.org/packages/18/1c/532c5d2cb11986372f14b798a95f2eaafe5779334f6a80589a68b5fcf769/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37d8412565a7267f7d79e29ab66876e55cb5e8e7b3bbf94f8206f6795f8f7e7e", size = 3345378, upload-time = "2025-10-10T11:11:01.039Z" }, - { url = "https://files.pythonhosted.org/packages/70/e7/de420e1cf16f838e1fa17b1120e83afff374c7c0130d088dba6286fcf8ea/psycopg2_binary-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:c665f01ec8ab273a61c62beeb8cce3014c214429ced8a308ca1fc410ecac3a39", size = 2713904, upload-time = "2025-10-10T11:11:04.81Z" }, - { url = 
"https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, - { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" }, - { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, - { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, - { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, - { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = 
"2025-10-30T02:55:07.332Z" }, - { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, - { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, - { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" }, - { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" }, - { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, - { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, - { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, - { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, - { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, - { url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" }, - { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = 
"2025-10-10T11:12:17.713Z" }, - { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, - { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" }, - { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, - { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, - { url = "https://files.pythonhosted.org/packages/ff/a8/a2709681b3ac11b0b1786def10006b8995125ba268c9a54bea6f5ae8bd3e/psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c", size = 3756572, upload-time = "2025-10-10T11:12:32.873Z" }, - { url = "https://files.pythonhosted.org/packages/62/e1/c2b38d256d0dafd32713e9f31982a5b028f4a3651f446be70785f484f472/psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee", size = 3864529, upload-time = "2025-10-10T11:12:36.791Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/32/b2ffe8f3853c181e88f0a157c5fb4e383102238d73c52ac6d93a5c8bffe6/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0", size = 4411242, upload-time = "2025-10-10T11:12:42.388Z" }, - { url = "https://files.pythonhosted.org/packages/10/04/6ca7477e6160ae258dc96f67c371157776564679aefd247b66f4661501a2/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766", size = 4468258, upload-time = "2025-10-10T11:12:48.654Z" }, - { url = "https://files.pythonhosted.org/packages/3c/7e/6a1a38f86412df101435809f225d57c1a021307dd0689f7a5e7fe83588b1/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3", size = 4166295, upload-time = "2025-10-10T11:12:52.525Z" }, - { url = "https://files.pythonhosted.org/packages/f2/7d/c07374c501b45f3579a9eb761cbf2604ddef3d96ad48679112c2c5aa9c25/psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f", size = 3983133, upload-time = "2025-10-30T02:55:24.329Z" }, - { url = "https://files.pythonhosted.org/packages/82/56/993b7104cb8345ad7d4516538ccf8f0d0ac640b1ebd8c754a7b024e76878/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4", size = 3652383, upload-time = "2025-10-10T11:12:56.387Z" }, - { url = "https://files.pythonhosted.org/packages/2d/ac/eaeb6029362fd8d454a27374d84c6866c82c33bfc24587b4face5a8e43ef/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c", size = 3298168, upload-time = 
"2025-10-10T11:13:00.403Z" }, - { url = "https://files.pythonhosted.org/packages/2b/39/50c3facc66bded9ada5cbc0de867499a703dc6bca6be03070b4e3b65da6c/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60", size = 3044712, upload-time = "2025-10-30T02:55:27.975Z" }, - { url = "https://files.pythonhosted.org/packages/9c/8e/b7de019a1f562f72ada81081a12823d3c1590bedc48d7d2559410a2763fe/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1", size = 3347549, upload-time = "2025-10-10T11:13:03.971Z" }, - { url = "https://files.pythonhosted.org/packages/80/2d/1bb683f64737bbb1f86c82b7359db1eb2be4e2c0c13b947f80efefa7d3e5/psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa", size = 2714215, upload-time = "2025-10-10T11:13:07.14Z" }, +version = "2.9.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/60/a3624f79acea344c16fbef3a94d28b89a8042ddfb8f3e4ca83f538671409/psycopg2_binary-2.9.12.tar.gz", hash = "sha256:5ac9444edc768c02a6b6a591f070b8aae28ff3a99be57560ac996001580f294c", size = 379686, upload-time = "2026-04-21T09:40:34.304Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/80/49bacf9e51617d8309f6f0123e29edc793f6f5f6700c7d1f1b20782fbb37/psycopg2_binary-2.9.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b818ceff717f98851a64bffd4c5eb5b3059ae280276dcecc52ac658dcf006a4", size = 3712314, upload-time = "2026-04-20T23:33:31.363Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f2/98eeac7d60c43df9338287834edf9b3e69be68a2db78a57b1b81d705e735/psycopg2_binary-2.9.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fa0d7caca8635c56e373055094eeda3208d901d55dd0ff5abc1d4e47f82b56", size = 3822389, upload-time = 
"2026-04-20T23:33:34.178Z" }, + { url = "https://files.pythonhosted.org/packages/9f/7c/30575e75f14d5351a56a1971bb43fe7f8bf7edf1b654fb1bec65c42a8812/psycopg2_binary-2.9.12-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:864c261b3690e1207d14bbfe0a61e27567981b80c47a778561e49f676f7ce433", size = 4578448, upload-time = "2026-04-20T23:33:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/4df366d89f28c527dc39d0b6c98a5ca74e30d37ac097b73f3352147568ae/psycopg2_binary-2.9.12-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c5ee5213445dd45312459029b8c4c0a695461eb517b753d2582315bd07995f5e", size = 4273705, upload-time = "2026-04-20T23:33:39.291Z" }, + { url = "https://files.pythonhosted.org/packages/4e/8a/c566803818eb03161ba869b6ba612bf7ad56816d98b9e5121e0a22ad6b0b/psycopg2_binary-2.9.12-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6f9cae1f848779b5b01f417e762c40d026ea93eb0648249a604728cda991dde3", size = 5893784, upload-time = "2026-04-20T23:33:41.658Z" }, + { url = "https://files.pythonhosted.org/packages/63/fe/0dfa5797e0b229e0567bc378695224caf14d547f73b05be0c80549089772/psycopg2_binary-2.9.12-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:63a3ebbd543d3d1eda088ac99164e8c5bac15293ee91f20281fd17d050aee1c4", size = 4109306, upload-time = "2026-04-20T23:33:43.953Z" }, + { url = "https://files.pythonhosted.org/packages/3c/89/28063adf17a4ba501eedd9890feab0c649ee4d8bd0a97df0ff1e9584feab/psycopg2_binary-2.9.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d6fcbba8c9fed08a73b8ac61ea79e4821e45b1e92bb466230c5e746bbf3d5256", size = 3654400, upload-time = "2026-04-20T23:33:46.115Z" }, + { url = "https://files.pythonhosted.org/packages/84/94/5a01de0aa4ead0b8d8d1aa4ec18cec0bd36d03fa714eaa5bb8a0b1b50020/psycopg2_binary-2.9.12-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:36512911ebb2b60a0c3e44d0bb5048c1980aced91235d133b7874f3d1d93487c", 
size = 3299215, upload-time = "2026-04-20T23:33:48.202Z" }, + { url = "https://files.pythonhosted.org/packages/7a/85/723bb085a61c6ac2dc0a0043f375f2fe7365363e27b073bad56ca5bda979/psycopg2_binary-2.9.12-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:8ffdb59fe88f99589e34354a130217aa1fd2d615612402d6edc8b3dbc7a44463", size = 3047724, upload-time = "2026-04-20T23:33:50.74Z" }, + { url = "https://files.pythonhosted.org/packages/b4/67/4d8b1e0d2fc4166677380eac0edf9cdff91013aca2546e8ef7bc04b56158/psycopg2_binary-2.9.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a46fe069b65255df410f856d842bc235f90e22ffdf532dda625fd4213d3fd9b1", size = 3349183, upload-time = "2026-04-20T23:33:59.635Z" }, + { url = "https://files.pythonhosted.org/packages/73/99/21af7a5498637ea4dc91a17c281a53bc1d632fbafe00f6689fbfb32a9fed/psycopg2_binary-2.9.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab29414b25dcb698bf26bf213e3348abdcd07bbd5de032a5bec15bd75b298b03", size = 2757036, upload-time = "2026-04-20T23:34:01.606Z" }, + { url = "https://files.pythonhosted.org/packages/d5/19/d4ce60954f3bb9d8e3bc5e5c4d1f2487de2d3851bf2391d54954c9df12a6/psycopg2_binary-2.9.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5c8ce6c61bd1b1f6b9c24ee32211599f6166af2c55abb19456090a21fd16554b", size = 3712338, upload-time = "2026-04-20T23:34:03.961Z" }, + { url = "https://files.pythonhosted.org/packages/53/71/c85409ee0d78890f0660eff262e815e7dd2bb741a17611d82e9e8cd9dc5e/psycopg2_binary-2.9.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4a9eaa6e7f4ff91bec10aa3fb296878e75187bced5cc4bafe17dc40915e1326", size = 3822407, upload-time = "2026-04-20T23:34:05.977Z" }, + { url = "https://files.pythonhosted.org/packages/3c/ed/60486c2c7f0d4d1ede2bfb1ed27e2498477ce646bc7f6b2759906303117e/psycopg2_binary-2.9.12-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c6528cefc8e50fcc6f4a107e27a672058b36cc5736d665476aeb413ba88dbb06", size = 4578425, upload-time = "2026-04-20T23:34:08.246Z" }, 
+ { url = "https://files.pythonhosted.org/packages/0b/b9/656cb03fad9f4f49f2145c334b1126ee75189929ca4e6187d485a2d59951/psycopg2_binary-2.9.12-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e4e184b1fb6072bf05388aa41c697e1b2d01b3473f107e7ec44f186a32cfd0b8", size = 4273709, upload-time = "2026-04-20T23:34:10.974Z" }, + { url = "https://files.pythonhosted.org/packages/99/66/08cf0da0e25cc6fb142c89be45fc8418792858f0c4cbff5e24530ff02cd6/psycopg2_binary-2.9.12-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4766ab678563054d3f1d064a4db19cc4b5f9e3a8d9018592a8285cf200c248f3", size = 5893779, upload-time = "2026-04-20T23:34:13.905Z" }, + { url = "https://files.pythonhosted.org/packages/17/d7/eecd9ce8e146d3721115d82d3836efdbb712187e4590325df549989d18f4/psycopg2_binary-2.9.12-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5a0253224780c978746cb9be55a946bcdaf40fe3519c0f622924cdabdafe2c39", size = 4109308, upload-time = "2026-04-20T23:34:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2e/b1dc289b362cc8d45697b57eefbd673186f49a4ea0906928988e3affcc98/psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0dc9228d47c46bda253d2ecd6bb93b56a9f2d7ad33b684a1fa3622bf74ffe30c", size = 3654405, upload-time = "2026-04-20T23:34:19.303Z" }, + { url = "https://files.pythonhosted.org/packages/eb/e4/4c4aea6473214dbdbd0fbba11aa4691e76dc01722c55724c5951719865ff/psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f921f3cd87035ef7df233383011d7a53ea1d346224752c1385f1edfd790ceb6a", size = 3299187, upload-time = "2026-04-20T23:34:21.206Z" }, + { url = "https://files.pythonhosted.org/packages/ba/5d/b03b99986446a4f57b170ed9a2579fb7ff9783ca0fa5226b19db99737fee/psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:3d999bd982a723113c1a45b55a7a6a90d64d0ed2278020ed625c490ff7bef96c", size = 3047716, upload-time = 
"2026-04-20T23:34:23.077Z" }, + { url = "https://files.pythonhosted.org/packages/14/86/382ee4afbd1d97500c9d2862b20c2fdeddf4b7335e984df3fb4309f64108/psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29d4d134bd0ab46ffb04e94aa3c5fa3ef582e9026609165e2f758ff76fc3a3be", size = 3349237, upload-time = "2026-04-20T23:34:25.211Z" }, + { url = "https://files.pythonhosted.org/packages/a8/16/9a57c75ba1eda7165c017342f526810d5f5a12647dde749c99ae9a7141d7/psycopg2_binary-2.9.12-cp311-cp311-win_amd64.whl", hash = "sha256:cb4a1dacdd48077150dc762a9e5ddbf32c256d66cb46f80839391aa458774936", size = 2757036, upload-time = "2026-04-20T23:34:27.77Z" }, + { url = "https://files.pythonhosted.org/packages/e2/9f/ef4ef3c8e15083df90ca35265cfd1a081a2f0cc07bb229c6314c6af817f4/psycopg2_binary-2.9.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5cdc05117180c5fa9c40eea8ea559ce64d73824c39d928b7da9fb5f6a9392433", size = 3712459, upload-time = "2026-04-20T23:34:30.549Z" }, + { url = "https://files.pythonhosted.org/packages/b5/01/3dd14e46ba48c1e1a6ec58ee599fa1b5efa00c246d5046cd903d0eeb1af1/psycopg2_binary-2.9.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d3227a3bc228c10d21011a99245edca923e4e8bf461857e869a507d9a41fe9f6", size = 3822936, upload-time = "2026-04-20T23:34:32.77Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f7/0640e4901119d8a9f7a1784b927f494e2198e213ceb593753d1f2c8b1b30/psycopg2_binary-2.9.12-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:995ce929eede89db6254b50827e2b7fd61e50d11f0b116b29fffe4a2e53c4580", size = 4578676, upload-time = "2026-04-20T23:34:35.18Z" }, + { url = "https://files.pythonhosted.org/packages/b0/55/44df3965b5f297c50cc0b1b594a31c67d6127a9d133045b8a66611b14dfb/psycopg2_binary-2.9.12-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9fe06d93e72f1c048e731a2e3e7854a5bfaa58fc736068df90b352cefe66f03f", size = 4274917, upload-time = "2026-04-20T23:34:37.982Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/4b/74535248b1eac0c9336862e8617c765ac94dac76f9e25d7c4a79588c8907/psycopg2_binary-2.9.12-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:40e7b28b63aaf737cb3a1edc3a9bbc9a9f4ad3dcb7152e8c1130e4050eddcb7d", size = 5894843, upload-time = "2026-04-20T23:34:40.856Z" }, + { url = "https://files.pythonhosted.org/packages/f2/ba/f1bf8d2ae71868ad800b661099086ee52bc0f8d9f05be1acd8ebb06757cc/psycopg2_binary-2.9.12-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:89d19a9f7899e8eb0656a2b3a08e0da04c720a06db6e0033eab5928aabe60fa9", size = 4110556, upload-time = "2026-04-20T23:34:44.016Z" }, + { url = "https://files.pythonhosted.org/packages/45/46/c15706c338403b7c420bcc0c2905aad116cc064545686d8bf85f1999ea00/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:612b965daee295ae2da8f8218ce1d274645dc76ef3f1abf6a0a94fd57eff876d", size = 3655714, upload-time = "2026-04-20T23:34:46.233Z" }, + { url = "https://files.pythonhosted.org/packages/b3/7c/a2d5dc09b64a4564db242a0fe418fde7d33f6f8259dd2c5b9d7def00fb5a/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b9a339b79d37c1b45f3235265f07cdeb0cb5ad7acd2ac7720a5920989c17c24e", size = 3301154, upload-time = "2026-04-20T23:34:49.528Z" }, + { url = "https://files.pythonhosted.org/packages/c0/e8/cc8c9a4ce71461f9ec548d38cadc41dc184b34c73e6455450775a9334ccd/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3471336e1acfd9c7fe507b8bad5af9317b6a89294f9eb37bd9a030bb7bebcdc6", size = 3048882, upload-time = "2026-04-20T23:34:51.86Z" }, + { url = "https://files.pythonhosted.org/packages/19/6a/31e2296bc0787c5ab75d3d118e40b239db8151b5192b90b77c72bc9256e9/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7af18183109e23502c8b2ae7f6926c0882766f35b5175a4cd737ad825e4d7a1b", size = 3351298, upload-time = "2026-04-20T23:34:54.124Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/a8/75f4e3e11203b590150abed2cf7794b9c9c9f7eceddae955191138b44dde/psycopg2_binary-2.9.12-cp312-cp312-win_amd64.whl", hash = "sha256:398fcd4db988c7d7d3713e2b8e18939776fd3fb447052daae4f24fa39daede4c", size = 2757230, upload-time = "2026-04-20T23:34:56.242Z" }, + { url = "https://files.pythonhosted.org/packages/91/bb/4608c96f970f6e0c56572e87027ef4404f709382a3503e9934526d7ba051/psycopg2_binary-2.9.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7c729a73c7b1b84de3582f73cdd27d905121dc2c531f3d9a3c32a3011033b965", size = 3712419, upload-time = "2026-04-20T23:34:58.754Z" }, + { url = "https://files.pythonhosted.org/packages/5e/af/48f76af9d50d61cf390f8cd657b503168b089e2e9298e48465d029fcc713/psycopg2_binary-2.9.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4413d0caef93c5cf50b96863df4c2efe8c269bf2267df353225595e7e15e8df7", size = 3822990, upload-time = "2026-04-20T23:35:00.821Z" }, + { url = "https://files.pythonhosted.org/packages/7a/df/aba0f99397cd811d32e06fc0cc781f1f3ce98bc0e729cb423925085d781a/psycopg2_binary-2.9.12-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:4dfcf8e45ebb0c663be34a3442f65e17311f3367089cd4e5e3a3e8e62c978777", size = 4578696, upload-time = "2026-04-20T23:35:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/eaa74021ac4e4d5c2f83d82fc6615a63f4fe6c94dc4e94c3990427053f67/psycopg2_binary-2.9.12-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c41321a14dd74aceb6a9a643b9253a334521babfa763fa873e33d89cfa122fb5", size = 4274982, upload-time = "2026-04-20T23:35:05.583Z" }, + { url = "https://files.pythonhosted.org/packages/35/ed/c25deff98bd26187ba48b3b250a3ffc3037c46c5b89362534a15d200e0db/psycopg2_binary-2.9.12-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83946ba43979ebfdc99a3cd0ee775c89f221df026984ba19d46133d8d75d3cd9", size = 5894867, upload-time = "2026-04-20T23:35:07.902Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/81/8d0e21ca77373c6c9589e5c4528f6e8f0c08c62cafc76fb0bddb7a2cee22/psycopg2_binary-2.9.12-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:411e85815652d13560fbe731878daa5d92378c4995a22302071890ec3397d019", size = 4110578, upload-time = "2026-04-20T23:35:10.149Z" }, + { url = "https://files.pythonhosted.org/packages/00/fc/f481e2435bd8f742d0123309174aae4165160ad3ef17c1b99c3622c241d2/psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c8ad4c08e00f7679559eaed7aff1edfffc60c086b976f93972f686384a95e2c", size = 3655816, upload-time = "2026-04-20T23:35:12.56Z" }, + { url = "https://files.pythonhosted.org/packages/53/79/b9f46466bdbe9f239c96cde8be33c1aace4842f06013b47b730dc9759187/psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:00814e40fa23c2b37ef0a1e3c749d89982c73a9cb5046137f0752a22d432e82f", size = 3301307, upload-time = "2026-04-20T23:35:15.029Z" }, + { url = "https://files.pythonhosted.org/packages/3f/19/7dc003b32fe35024df89b658104f7c8538a8b2dcbde7a4e746ce929742e7/psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:98062447aebc20ed20add1f547a364fd0ef8933640d5372ff1873f8deb9b61be", size = 3048968, upload-time = "2026-04-20T23:35:16.757Z" }, + { url = "https://files.pythonhosted.org/packages/91/58/2dbd7db5c604d45f4950d988506aae672a14126ec22998ced5021cbb76bb/psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:66a7685d7e548f10fb4ce32fb01a7b7f4aa702134de92a292c7bd9e0d3dbd290", size = 3351369, upload-time = "2026-04-20T23:35:18.933Z" }, + { url = "https://files.pythonhosted.org/packages/42/ee/dee8dcaad07f735824de3d6563bc67119fa6c28257b17977a8d624f02fab/psycopg2_binary-2.9.12-cp313-cp313-win_amd64.whl", hash = "sha256:b6937f5fe4e180aeee87de907a2fa982ded6f7f15d7218f78a083e4e1d68f2a0", size = 2757347, upload-time = "2026-04-20T23:35:21.283Z" }, ] [[package]] @@ -6373,45 +6309,45 @@ wheels = [ [[package]] 
name = "pyarrow" -version = "23.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/22/134986a4cc224d593c1afde5494d18ff629393d74cc2eddb176669f234a4/pyarrow-23.0.1.tar.gz", hash = "sha256:b8c5873e33440b2bc2f4a79d2b47017a89c5a24116c055625e6f2ee50523f019", size = 1167336, upload-time = "2026-02-16T10:14:12.39Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/a8/24e5dc6855f50a62936ceb004e6e9645e4219a8065f304145d7fb8a79d5d/pyarrow-23.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:3fab8f82571844eb3c460f90a75583801d14ca0cc32b1acc8c361650e006fd56", size = 34307390, upload-time = "2026-02-16T10:08:08.654Z" }, - { url = "https://files.pythonhosted.org/packages/bc/8e/4be5617b4aaae0287f621ad31c6036e5f63118cfca0dc57d42121ff49b51/pyarrow-23.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:3f91c038b95f71ddfc865f11d5876c42f343b4495535bd262c7b321b0b94507c", size = 35853761, upload-time = "2026-02-16T10:08:17.811Z" }, - { url = "https://files.pythonhosted.org/packages/2e/08/3e56a18819462210432ae37d10f5c8eed3828be1d6c751b6e6a2e93c286a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:d0744403adabef53c985a7f8a082b502a368510c40d184df349a0a8754533258", size = 44493116, upload-time = "2026-02-16T10:08:25.792Z" }, - { url = "https://files.pythonhosted.org/packages/f8/82/c40b68001dbec8a3faa4c08cd8c200798ac732d2854537c5449dc859f55a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c33b5bf406284fd0bba436ed6f6c3ebe8e311722b441d89397c54f871c6863a2", size = 47564532, upload-time = "2026-02-16T10:08:34.27Z" }, - { url = "https://files.pythonhosted.org/packages/20/bc/73f611989116b6f53347581b02177f9f620efdf3cd3f405d0e83cdf53a83/pyarrow-23.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ddf743e82f69dcd6dbbcb63628895d7161e04e56794ef80550ac6f3315eeb1d5", size = 48183685, upload-time = "2026-02-16T10:08:42.889Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/cc/6c6b3ecdae2a8c3aced99956187e8302fc954cc2cca2a37cf2111dad16ce/pyarrow-23.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e052a211c5ac9848ae15d5ec875ed0943c0221e2fcfe69eee80b604b4e703222", size = 50605582, upload-time = "2026-02-16T10:08:51.641Z" }, - { url = "https://files.pythonhosted.org/packages/8d/94/d359e708672878d7638a04a0448edf7c707f9e5606cee11e15aaa5c7535a/pyarrow-23.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5abde149bb3ce524782d838eb67ac095cd3fd6090eba051130589793f1a7f76d", size = 27521148, upload-time = "2026-02-16T10:08:58.077Z" }, - { url = "https://files.pythonhosted.org/packages/b0/41/8e6b6ef7e225d4ceead8459427a52afdc23379768f54dd3566014d7618c1/pyarrow-23.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6f0147ee9e0386f519c952cc670eb4a8b05caa594eeffe01af0e25f699e4e9bb", size = 34302230, upload-time = "2026-02-16T10:09:03.859Z" }, - { url = "https://files.pythonhosted.org/packages/bf/4a/1472c00392f521fea03ae93408bf445cc7bfa1ab81683faf9bc188e36629/pyarrow-23.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:0ae6e17c828455b6265d590100c295193f93cc5675eb0af59e49dbd00d2de350", size = 35850050, upload-time = "2026-02-16T10:09:11.877Z" }, - { url = "https://files.pythonhosted.org/packages/0c/b2/bd1f2f05ded56af7f54d702c8364c9c43cd6abb91b0e9933f3d77b4f4132/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:fed7020203e9ef273360b9e45be52a2a47d3103caf156a30ace5247ffb51bdbd", size = 44491918, upload-time = "2026-02-16T10:09:18.144Z" }, - { url = "https://files.pythonhosted.org/packages/0b/62/96459ef5b67957eac38a90f541d1c28833d1b367f014a482cb63f3b7cd2d/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:26d50dee49d741ac0e82185033488d28d35be4d763ae6f321f97d1140eb7a0e9", size = 47562811, upload-time = "2026-02-16T10:09:25.792Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/94/1170e235add1f5f45a954e26cd0e906e7e74e23392dcb560de471f7366ec/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3c30143b17161310f151f4a2bcfe41b5ff744238c1039338779424e38579d701", size = 48183766, upload-time = "2026-02-16T10:09:34.645Z" }, - { url = "https://files.pythonhosted.org/packages/0e/2d/39a42af4570377b99774cdb47f63ee6c7da7616bd55b3d5001aa18edfe4f/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db2190fa79c80a23fdd29fef4b8992893f024ae7c17d2f5f4db7171fa30c2c78", size = 50607669, upload-time = "2026-02-16T10:09:44.153Z" }, - { url = "https://files.pythonhosted.org/packages/00/ca/db94101c187f3df742133ac837e93b1f269ebdac49427f8310ee40b6a58f/pyarrow-23.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:f00f993a8179e0e1c9713bcc0baf6d6c01326a406a9c23495ec1ba9c9ebf2919", size = 27527698, upload-time = "2026-02-16T10:09:50.263Z" }, - { url = "https://files.pythonhosted.org/packages/9a/4b/4166bb5abbfe6f750fc60ad337c43ecf61340fa52ab386da6e8dbf9e63c4/pyarrow-23.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f4b0dbfa124c0bb161f8b5ebb40f1a680b70279aa0c9901d44a2b5a20806039f", size = 34214575, upload-time = "2026-02-16T10:09:56.225Z" }, - { url = "https://files.pythonhosted.org/packages/e1/da/3f941e3734ac8088ea588b53e860baeddac8323ea40ce22e3d0baa865cc9/pyarrow-23.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7707d2b6673f7de054e2e83d59f9e805939038eebe1763fe811ee8fa5c0cd1a7", size = 35832540, upload-time = "2026-02-16T10:10:03.428Z" }, - { url = "https://files.pythonhosted.org/packages/88/7c/3d841c366620e906d54430817531b877ba646310296df42ef697308c2705/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:86ff03fb9f1a320266e0de855dee4b17da6794c595d207f89bba40d16b5c78b9", size = 44470940, upload-time = "2026-02-16T10:10:10.704Z" }, - { url = 
"https://files.pythonhosted.org/packages/2c/a5/da83046273d990f256cb79796a190bbf7ec999269705ddc609403f8c6b06/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:813d99f31275919c383aab17f0f455a04f5a429c261cc411b1e9a8f5e4aaaa05", size = 47586063, upload-time = "2026-02-16T10:10:17.95Z" }, - { url = "https://files.pythonhosted.org/packages/5b/3c/b7d2ebcff47a514f47f9da1e74b7949138c58cfeb108cdd4ee62f43f0cf3/pyarrow-23.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bf5842f960cddd2ef757d486041d57c96483efc295a8c4a0e20e704cbbf39c67", size = 48173045, upload-time = "2026-02-16T10:10:25.363Z" }, - { url = "https://files.pythonhosted.org/packages/43/b2/b40961262213beaba6acfc88698eb773dfce32ecdf34d19291db94c2bd73/pyarrow-23.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564baf97c858ecc03ec01a41062e8f4698abc3e6e2acd79c01c2e97880a19730", size = 50621741, upload-time = "2026-02-16T10:10:33.477Z" }, - { url = "https://files.pythonhosted.org/packages/f6/70/1fdda42d65b28b078e93d75d371b2185a61da89dda4def8ba6ba41ebdeb4/pyarrow-23.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:07deae7783782ac7250989a7b2ecde9b3c343a643f82e8a4df03d93b633006f0", size = 27620678, upload-time = "2026-02-16T10:10:39.31Z" }, - { url = "https://files.pythonhosted.org/packages/47/10/2cbe4c6f0fb83d2de37249567373d64327a5e4d8db72f486db42875b08f6/pyarrow-23.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:6b8fda694640b00e8af3c824f99f789e836720aa8c9379fb435d4c4953a756b8", size = 34210066, upload-time = "2026-02-16T10:10:45.487Z" }, - { url = "https://files.pythonhosted.org/packages/cb/4f/679fa7e84dadbaca7a65f7cdba8d6c83febbd93ca12fa4adf40ba3b6362b/pyarrow-23.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:8ff51b1addc469b9444b7c6f3548e19dc931b172ab234e995a60aea9f6e6025f", size = 35825526, upload-time = "2026-02-16T10:10:52.266Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/63/d2747d930882c9d661e9398eefc54f15696547b8983aaaf11d4a2e8b5426/pyarrow-23.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:71c5be5cbf1e1cb6169d2a0980850bccb558ddc9b747b6206435313c47c37677", size = 44473279, upload-time = "2026-02-16T10:11:01.557Z" }, - { url = "https://files.pythonhosted.org/packages/b3/93/10a48b5e238de6d562a411af6467e71e7aedbc9b87f8d3a35f1560ae30fb/pyarrow-23.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:9b6f4f17b43bc39d56fec96e53fe89d94bac3eb134137964371b45352d40d0c2", size = 47585798, upload-time = "2026-02-16T10:11:09.401Z" }, - { url = "https://files.pythonhosted.org/packages/5c/20/476943001c54ef078dbf9542280e22741219a184a0632862bca4feccd666/pyarrow-23.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fc13fc6c403d1337acab46a2c4346ca6c9dec5780c3c697cf8abfd5e19b6b37", size = 48179446, upload-time = "2026-02-16T10:11:17.781Z" }, - { url = "https://files.pythonhosted.org/packages/4b/b6/5dd0c47b335fcd8edba9bfab78ad961bd0fd55ebe53468cc393f45e0be60/pyarrow-23.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5c16ed4f53247fa3ffb12a14d236de4213a4415d127fe9cebed33d51671113e2", size = 50623972, upload-time = "2026-02-16T10:11:26.185Z" }, - { url = "https://files.pythonhosted.org/packages/d5/09/a532297c9591a727d67760e2e756b83905dd89adb365a7f6e9c72578bcc1/pyarrow-23.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:cecfb12ef629cf6be0b1887f9f86463b0dd3dc3195ae6224e74006be4736035a", size = 27540749, upload-time = "2026-02-16T10:12:23.297Z" }, - { url = "https://files.pythonhosted.org/packages/a5/8e/38749c4b1303e6ae76b3c80618f84861ae0c55dd3c2273842ea6f8258233/pyarrow-23.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:29f7f7419a0e30264ea261fdc0e5fe63ce5a6095003db2945d7cd78df391a7e1", size = 34471544, upload-time = "2026-02-16T10:11:32.535Z" }, - { url = 
"https://files.pythonhosted.org/packages/a3/73/f237b2bc8c669212f842bcfd842b04fc8d936bfc9d471630569132dc920d/pyarrow-23.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:33d648dc25b51fd8055c19e4261e813dfc4d2427f068bcecc8b53d01b81b0500", size = 35949911, upload-time = "2026-02-16T10:11:39.813Z" }, - { url = "https://files.pythonhosted.org/packages/0c/86/b912195eee0903b5611bf596833def7d146ab2d301afeb4b722c57ffc966/pyarrow-23.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:cd395abf8f91c673dd3589cadc8cc1ee4e8674fa61b2e923c8dd215d9c7d1f41", size = 44520337, upload-time = "2026-02-16T10:11:47.764Z" }, - { url = "https://files.pythonhosted.org/packages/69/c2/f2a717fb824f62d0be952ea724b4f6f9372a17eed6f704b5c9526f12f2f1/pyarrow-23.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:00be9576d970c31defb5c32eb72ef585bf600ef6d0a82d5eccaae96639cf9d07", size = 47548944, upload-time = "2026-02-16T10:11:56.607Z" }, - { url = "https://files.pythonhosted.org/packages/84/a7/90007d476b9f0dc308e3bc57b832d004f848fd6c0da601375d20d92d1519/pyarrow-23.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c2139549494445609f35a5cda4eb94e2c9e4d704ce60a095b342f82460c73a83", size = 48236269, upload-time = "2026-02-16T10:12:04.47Z" }, - { url = "https://files.pythonhosted.org/packages/b0/3f/b16fab3e77709856eb6ac328ce35f57a6d4a18462c7ca5186ef31b45e0e0/pyarrow-23.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7044b442f184d84e2351e5084600f0d7343d6117aabcbc1ac78eb1ae11eb4125", size = 50604794, upload-time = "2026-02-16T10:12:11.797Z" }, - { url = "https://files.pythonhosted.org/packages/e9/a1/22df0620a9fac31d68397a75465c344e83c3dfe521f7612aea33e27ab6c0/pyarrow-23.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a35581e856a2fafa12f3f54fce4331862b1cfb0bef5758347a858a4aa9d6bae8", size = 27660642, upload-time = "2026-02-16T10:12:17.746Z" }, +version = "24.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/91/13/13e1069b351bdc3881266e11147ffccf687505dbb0ea74036237f5d454a5/pyarrow-24.0.0.tar.gz", hash = "sha256:85fe721a14dd823aca09127acbb06c3ca723efbd436c004f16bca601b04dcc83", size = 1180261, upload-time = "2026-04-21T10:51:25.837Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/bf/a34fee1d624152124fa8355c42f34195ad5fe5233ce5bb87946432047d52/pyarrow-24.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:7c2b98645d576a0b9616892ead22b64a83a5f043c5e2ca15ebcefcb5b70c80cb", size = 35076681, upload-time = "2026-04-21T08:51:46.845Z" }, + { url = "https://files.pythonhosted.org/packages/1d/41/64180033d7027afce12dc96d0fe1f504c6fa112190582b458acea2399530/pyarrow-24.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:644a246325b8c69c595ad1dd4b463eba4b0cdb731370e4a86137d433208d6147", size = 36684260, upload-time = "2026-04-21T08:51:53.642Z" }, + { url = "https://files.pythonhosted.org/packages/57/02/9b9320e673dd8a99411fac78690f3df92f6dd6f59754c750110bca66d64e/pyarrow-24.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3a577bd840ca83f646f0a625dbc571dba7044c43c2d1503afc378b570954345c", size = 45698566, upload-time = "2026-04-21T10:46:02.133Z" }, + { url = "https://files.pythonhosted.org/packages/67/33/f75e91b9a64c3f33c787e263c93b871ad91b8a4a68c1d5cebddd9840e835/pyarrow-24.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:e3268e43984d0b1a185c89b4cfff282a7ead12fc93f56cfd7088bdbcbe727041", size = 48835562, upload-time = "2026-04-21T10:46:10.278Z" }, + { url = "https://files.pythonhosted.org/packages/a5/63/097510448e47e4091faa41c43ba92f97cecaab8f4535b56a3d149578f634/pyarrow-24.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2392d954fcb920f42d230284b677605e4e2fbb11f2821e823e642abd67fbb491", size = 49394997, upload-time = "2026-04-21T10:46:18.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/6b/c047d6222ab279024a062742d1807e2fbaf27bba88a98637299ff47b9236/pyarrow-24.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bec9373df11544592b0ba7ec2af0e35059e5f0e7647c6183a854dedd193298f1", size = 51911424, upload-time = "2026-04-21T10:46:25.347Z" }, + { url = "https://files.pythonhosted.org/packages/3a/ba/464cc70761c2a525d97ebd84e21c31ebd47f3ef4bdcee117009f51c46f24/pyarrow-24.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:c42ab9439498270139cc63e18847a02afe5c8b3ed9c931266533cfe378bd3591", size = 27251730, upload-time = "2026-04-21T10:46:30.913Z" }, + { url = "https://files.pythonhosted.org/packages/62/c9/a47ab7ece0d86cbe6678418a0fbd1ac4bb493b9184a3891dfa0e7f287ae0/pyarrow-24.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b0e131f880cda8d04e076cee175a46fc0e8bc8b65c99c6c09dff6669335fde74", size = 35068898, upload-time = "2026-04-21T10:46:36.599Z" }, + { url = "https://files.pythonhosted.org/packages/d1/bc/8db86617a9a58008acf8913d6fed68ea2a46acb6de928db28d724c891a68/pyarrow-24.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:1b2fe7f9a5566401a0ef2571f197eb92358925c1f0c8dba305d6e43ea0871bb3", size = 36679915, upload-time = "2026-04-21T10:46:42.602Z" }, + { url = "https://files.pythonhosted.org/packages/eb/8e/fb178720400ef69db251eb4a9c3ccf4af269bc1feb5055529b8fc87170d1/pyarrow-24.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0b3537c00fb8d384f15ac1e79b6eb6db04a16514c8c1d22e59a9b95c8ba42868", size = 45697931, upload-time = "2026-04-21T10:46:48.403Z" }, + { url = "https://files.pythonhosted.org/packages/f3/27/99c42abe8e21b44f4917f62631f3aa31404882a2c41d8a4cd5c110e13d52/pyarrow-24.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:14e31a3c9e35f1ab6356c6378f6f72830e6d2d5f1791df3774a7b097d18a6a1e", size = 48837449, upload-time = "2026-04-21T10:46:55.329Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/b6/333749e2666e9032891125bf9c691146e92901bece62030ac1430e2e7c88/pyarrow-24.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7d9a514e73bc42711e6a35aaccf3587c520024fe0a25d830a1a8a27c15f4f57", size = 49395949, upload-time = "2026-04-21T10:47:01.869Z" }, + { url = "https://files.pythonhosted.org/packages/17/25/c5201706a2dd374e8ba6ee3fd7a8c89fb7ffc16eed5217a91fd2bd7f7626/pyarrow-24.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b196eb3f931862af3fa84c2a253514d859c08e0d8fe020e07be12e75a5a9780c", size = 51912986, upload-time = "2026-04-21T10:47:09.872Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d2/4d1bbba65320b21a49678d6fbdc6ff7c649251359fdcfc03568c4136231d/pyarrow-24.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:35405aecb474e683fb36af650618fd5340ee5471fc65a21b36076a18bbc6c981", size = 27255371, upload-time = "2026-04-21T10:47:15.943Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a9/9686d9f07837f91f775e8932659192e02c74f9d8920524b480b85212cc68/pyarrow-24.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:6233c9ed9ab9d1db47de57d9753256d9dcffbf42db341576099f0fd9f6bf4810", size = 34981559, upload-time = "2026-04-21T10:47:22.17Z" }, + { url = "https://files.pythonhosted.org/packages/80/b6/0ddf0e9b6ead3474ab087ae598c76b031fc45532bf6a63f3a553440fb258/pyarrow-24.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:f7616236ec1bc2b15bfdec22a71ab38851c86f8f05ff64f379e1278cf20c634a", size = 36663654, upload-time = "2026-04-21T10:47:28.315Z" }, + { url = "https://files.pythonhosted.org/packages/7c/3b/926382efe8ce27ba729071d3566ade6dfb86bdf112f366000196b2f5780a/pyarrow-24.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:1617043b99bd33e5318ae18eb2919af09c71322ef1ca46566cdafc6e6712fb66", size = 45679394, upload-time = "2026-04-21T10:47:34.821Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/7a/829f7d9dfd37c207206081d6dad474d81dde29952401f07f2ba507814818/pyarrow-24.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6165461f55ef6314f026de6638d661188e3455d3ec49834556a0ebbdbace18bb", size = 48863122, upload-time = "2026-04-21T10:47:42.056Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e8/f88ce625fe8babaae64e8db2d417c7653adb3019b08aae85c5ed787dc816/pyarrow-24.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3b13dedfe76a0ad2d1d859b0811b53827a4e9d93a0bcb05cf59333ab4980cc7e", size = 49376032, upload-time = "2026-04-21T10:47:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/36/7a/82c363caa145fff88fb475da50d3bf52bb024f61917be5424c3392eaf878/pyarrow-24.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:25ea65d868eb04015cd18e6df2fbe98f07e5bda2abefabcb88fce39a947716f6", size = 51929490, upload-time = "2026-04-21T10:47:55.981Z" }, + { url = "https://files.pythonhosted.org/packages/66/1c/e3e72c8014ad2743ca64a701652c733cc5cbcee15c0463a32a8c55518d9e/pyarrow-24.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:295f0a7f2e242dabd513737cf076007dc5b2d59237e3eca37b05c0c6446f3826", size = 27355660, upload-time = "2026-04-21T10:48:01.718Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d3/a1abf004482026ddc17f4503db227787fa3cfe41ec5091ff20e4fea55e57/pyarrow-24.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:02b001b3ed4723caa44f6cd1af2d5c86aa2cf9971dacc2ffa55b21237713dfba", size = 34976759, upload-time = "2026-04-21T10:48:07.258Z" }, + { url = "https://files.pythonhosted.org/packages/4f/4a/34f0a36d28a2dd32225301b79daad44e243dc1a2bb77d43b60749be255c4/pyarrow-24.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:04920d6a71aabd08a0417709efce97d45ea8e6fb733d9ca9ecffb13c67839f68", size = 36658471, upload-time = "2026-04-21T10:48:13.347Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/78/543b94712ae8bb1a6023bcc1acf1a740fbff8286747c289cd9468fced2a5/pyarrow-24.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:a964266397740257f16f7bb2e4f08a0c81454004beab8ff59dd531b73610e9f2", size = 45675981, upload-time = "2026-04-21T10:48:20.201Z" }, + { url = "https://files.pythonhosted.org/packages/84/9f/8fb7c222b100d314137fa40ec050de56cd8c6d957d1cfff685ce72f15b17/pyarrow-24.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6f066b179d68c413374294bc1735f68475457c933258df594443bb9d88ddc2a0", size = 48859172, upload-time = "2026-04-21T10:48:27.541Z" }, + { url = "https://files.pythonhosted.org/packages/a7/d3/1ea72538e6c8b3b475ed78d1049a2c518e655761ea50fe1171fc855fcab7/pyarrow-24.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1183baeb14c5f587b1ec52831e665718ce632caab84b7cd6b85fd44f96114495", size = 49385733, upload-time = "2026-04-21T10:48:34.7Z" }, + { url = "https://files.pythonhosted.org/packages/c3/be/c3d8b06a1ba35f2260f8e1f771abbee7d5e345c0937aab90675706b1690a/pyarrow-24.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:806f24b4085453c197a5078218d1ee08783ebbba271badd153d1ae22a3ee804f", size = 51934335, upload-time = "2026-04-21T10:48:42.099Z" }, + { url = "https://files.pythonhosted.org/packages/9c/62/89e07a1e7329d2cde3e3c6994ba0839a24977a2beda8be6005ea3d860b99/pyarrow-24.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:e4505fc6583f7b05ab854934896bcac8253b04ac1171a77dfb73efef92076d91", size = 27271748, upload-time = "2026-04-21T10:49:42.532Z" }, + { url = "https://files.pythonhosted.org/packages/17/1a/cff3a59f80b5b1658549d46611b67163f65e0664431c076ad728bf9d5af4/pyarrow-24.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:1a4e45017efbf115032e4475ee876d525e0e36c742214fbe405332480ecd6275", size = 35238554, upload-time = "2026-04-21T10:48:48.526Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/99/cce0f42a327bfef2c420fb6078a3eb834826e5d6697bf3009fe11d2ad051/pyarrow-24.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:7986f1fa71cee060ad00758bcc79d3a93bab8559bf978fab9e53472a2e25a17b", size = 36782301, upload-time = "2026-04-21T10:48:55.181Z" }, + { url = "https://files.pythonhosted.org/packages/2a/66/8e560d5ff6793ca29aca213c53eec0dd482dd46cb93b2819e5aab52e4252/pyarrow-24.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:d3e0b61e8efb24ed38898e5cdc5fffa9124be480008d401a1f8071500494ae42", size = 45721929, upload-time = "2026-04-21T10:49:03.676Z" }, + { url = "https://files.pythonhosted.org/packages/27/0c/a26e25505d030716e078d9f16eb74973cbf0b33b672884e9f9da1c83b871/pyarrow-24.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:55a3bc1e3df3b5567b7d27ef551b2283f0c68a5e86f1cd56abc569da4f31335b", size = 48825365, upload-time = "2026-04-21T10:49:11.714Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/771f9ecb0c65e73fe9dccdd1717901b9594f08c4515d000c7c62df573811/pyarrow-24.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:641f795b361874ac9da5294f8f443dfdbee355cf2bd9e3b8d97aaac2306b9b37", size = 49451819, upload-time = "2026-04-21T10:49:21.474Z" }, + { url = "https://files.pythonhosted.org/packages/48/da/61ae89a88732f5a785646f3ec6125dbb640fa98a540eb2b9889caa561403/pyarrow-24.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8adc8e6ce5fccf5dc707046ae4914fd537def529709cc0d285d37a7f9cd442ca", size = 51909252, upload-time = "2026-04-21T10:49:31.164Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1a/8dd5cafab7b66573fa91c03d06d213356ad4edd71813aa75e08ce2b3a844/pyarrow-24.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:9b18371ad2f44044b81a8d23bc2d8a9b6a6226dca775e8e16cfee640473d6c5d", size = 27388127, upload-time = "2026-04-21T10:49:37.334Z" }, ] [[package]] @@ -6824,53 +6760,53 @@ sdist = { url = "https://files.pythonhosted.org/packages/5d/ab/34ec41718af73c001 
[[package]] name = "pymongo" -version = "4.16.0" +version = "4.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dnspython" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/65/9c/a4895c4b785fc9865a84a56e14b5bd21ca75aadc3dab79c14187cdca189b/pymongo-4.16.0.tar.gz", hash = "sha256:8ba8405065f6e258a6f872fe62d797a28f383a12178c7153c01ed04e845c600c", size = 2495323, upload-time = "2026-01-07T18:05:48.107Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/93/c36c0998dd91ad8b5031d2e77a903d5cd705b5ba05ca92bcc8731a2c3a8d/pymongo-4.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ed162b2227f98d5b270ecbe1d53be56c8c81db08a1a8f5f02d89c7bb4d19591d", size = 807993, upload-time = "2026-01-07T18:03:40.302Z" }, - { url = "https://files.pythonhosted.org/packages/f3/96/d2117d792fa9fedb2f6ccf0608db31f851e8382706d7c3c88c6ac92cc958/pymongo-4.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a9390dce61d705a88218f0d7b54d7e1fa1b421da8129fc7c009e029a9a6b81e", size = 808355, upload-time = "2026-01-07T18:03:42.13Z" }, - { url = "https://files.pythonhosted.org/packages/ae/2e/e79b7b86c0dd6323d0985c201583c7921d67b842b502aae3f3327cbe3935/pymongo-4.16.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:92a232af9927710de08a6c16a9710cc1b175fb9179c0d946cd4e213b92b2a69a", size = 1182337, upload-time = "2026-01-07T18:03:44.126Z" }, - { url = "https://files.pythonhosted.org/packages/7b/82/07ec9966381c57d941fddc52637e9c9653e63773be410bd8605f74683084/pymongo-4.16.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4d79aa147ce86aef03079096d83239580006ffb684eead593917186aee407767", size = 1200928, upload-time = "2026-01-07T18:03:45.52Z" }, - { url = 
"https://files.pythonhosted.org/packages/44/15/9d45e3cc6fa428b0a3600b0c1c86b310f28c91251c41493460695ab40b6b/pymongo-4.16.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:19a1c96e7f39c7a59a9cfd4d17920cf9382f6f684faeff4649bf587dc59f8edc", size = 1239418, upload-time = "2026-01-07T18:03:47.03Z" }, - { url = "https://files.pythonhosted.org/packages/c8/b3/f35ee51e2a3f05f673ad4f5e803ae1284c42f4413e8d121c4958f1af4eb9/pymongo-4.16.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efe020c46ce3c3a89af6baec6569635812129df6fb6cf76d4943af3ba6ee2069", size = 1229045, upload-time = "2026-01-07T18:03:48.377Z" }, - { url = "https://files.pythonhosted.org/packages/18/2d/1688b88d7c0a5c01da8c703dea831419435d9ce67c6ddbb0ac629c9c72d2/pymongo-4.16.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9dc2c00bed568732b89e211b6adca389053d5e6d2d5a8979e80b813c3ec4d1f9", size = 1196517, upload-time = "2026-01-07T18:03:50.205Z" }, - { url = "https://files.pythonhosted.org/packages/e6/c6/e89db0f23bd20757b627a5d8c73a609ffd6741887b9004ab229208a79764/pymongo-4.16.0-cp310-cp310-win32.whl", hash = "sha256:5b9c6d689bbe5beb156374508133218610e14f8c81e35bc17d7a14e30ab593e6", size = 794911, upload-time = "2026-01-07T18:03:52.701Z" }, - { url = "https://files.pythonhosted.org/packages/37/54/e00a5e517153f310a33132375159e42dceb12bee45b51b35aa0df14f1866/pymongo-4.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:2290909275c9b8f637b0a92eb9b89281e18a72922749ebb903403ab6cc7da914", size = 804801, upload-time = "2026-01-07T18:03:57.671Z" }, - { url = "https://files.pythonhosted.org/packages/e5/0a/2572faf89195a944c99c6d756227019c8c5f4b5658ecc261c303645dfe69/pymongo-4.16.0-cp310-cp310-win_arm64.whl", hash = "sha256:6af1aaa26f0835175d2200e62205b78e7ec3ffa430682e322cc91aaa1a0dbf28", size = 797579, upload-time = "2026-01-07T18:03:59.1Z" }, - { url = 
"https://files.pythonhosted.org/packages/e6/3a/907414a763c4270b581ad6d960d0c6221b74a70eda216a1fdd8fa82ba89f/pymongo-4.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6f2077ec24e2f1248f9cac7b9a2dfb894e50cc7939fcebfb1759f99304caabef", size = 862561, upload-time = "2026-01-07T18:04:00.628Z" }, - { url = "https://files.pythonhosted.org/packages/8c/58/787d8225dd65cb2383c447346ea5e200ecfde89962d531111521e3b53018/pymongo-4.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d4f7ba040f72a9f43a44059872af5a8c8c660aa5d7f90d5344f2ed1c3c02721", size = 862923, upload-time = "2026-01-07T18:04:02.213Z" }, - { url = "https://files.pythonhosted.org/packages/5d/a7/cc2865aae32bc77ade7b35f957a58df52680d7f8506f93c6edbf458e5738/pymongo-4.16.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8a0f73af1ea56c422b2dcfc0437459148a799ef4231c6aee189d2d4c59d6728f", size = 1426779, upload-time = "2026-01-07T18:04:03.942Z" }, - { url = "https://files.pythonhosted.org/packages/81/25/3e96eb7998eec05382174da2fefc58d28613f46bbdf821045539d0ed60ab/pymongo-4.16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa30cd16ddd2f216d07ba01d9635c873e97ddb041c61cf0847254edc37d1c60e", size = 1454207, upload-time = "2026-01-07T18:04:05.387Z" }, - { url = "https://files.pythonhosted.org/packages/86/7b/8e817a7df8c5d565d39dd4ca417a5e0ef46cc5cc19aea9405f403fec6449/pymongo-4.16.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d638b0b1b294d95d0fdc73688a3b61e05cc4188872818cd240d51460ccabcb5", size = 1511654, upload-time = "2026-01-07T18:04:08.458Z" }, - { url = "https://files.pythonhosted.org/packages/39/7a/50c4d075ccefcd281cdcfccc5494caa5665b096b85e65a5d6afabb80e09e/pymongo-4.16.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:21d02cc10a158daa20cb040985e280e7e439832fc6b7857bff3d53ef6914ad50", size = 1496794, 
upload-time = "2026-01-07T18:04:10.355Z" }, - { url = "https://files.pythonhosted.org/packages/0f/cd/ebdc1aaca5deeaf47310c369ef4083e8550e04e7bf7e3752cfb7d95fcdb8/pymongo-4.16.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fbb8d3552c2ad99d9e236003c0b5f96d5f05e29386ba7abae73949bfebc13dd", size = 1448371, upload-time = "2026-01-07T18:04:11.76Z" }, - { url = "https://files.pythonhosted.org/packages/3d/c9/50fdd78c37f68ea49d590c027c96919fbccfd98f3a4cb39f84f79970bd37/pymongo-4.16.0-cp311-cp311-win32.whl", hash = "sha256:be1099a8295b1a722d03fb7b48be895d30f4301419a583dcf50e9045968a041c", size = 841024, upload-time = "2026-01-07T18:04:13.522Z" }, - { url = "https://files.pythonhosted.org/packages/4a/dd/a3aa1ade0cf9980744db703570afac70a62c85b432c391dea0577f6da7bb/pymongo-4.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:61567f712bda04c7545a037e3284b4367cad8d29b3dec84b4bf3b2147020a75b", size = 855838, upload-time = "2026-01-07T18:04:14.923Z" }, - { url = "https://files.pythonhosted.org/packages/bf/10/9ad82593ccb895e8722e4884bad4c5ce5e8ff6683b740d7823a6c2bcfacf/pymongo-4.16.0-cp311-cp311-win_arm64.whl", hash = "sha256:c53338613043038005bf2e41a2fafa08d29cdbc0ce80891b5366c819456c1ae9", size = 845007, upload-time = "2026-01-07T18:04:17.099Z" }, - { url = "https://files.pythonhosted.org/packages/6a/03/6dd7c53cbde98de469a3e6fb893af896dca644c476beb0f0c6342bcc368b/pymongo-4.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bd4911c40a43a821dfd93038ac824b756b6e703e26e951718522d29f6eb166a8", size = 917619, upload-time = "2026-01-07T18:04:19.173Z" }, - { url = "https://files.pythonhosted.org/packages/73/e1/328915f2734ea1f355dc9b0e98505ff670f5fab8be5e951d6ed70971c6aa/pymongo-4.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25a6b03a68f9907ea6ec8bc7cf4c58a1b51a18e23394f962a6402f8e46d41211", size = 917364, upload-time = "2026-01-07T18:04:20.861Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/fe/4769874dd9812a1bc2880a9785e61eba5340da966af888dd430392790ae0/pymongo-4.16.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:91ac0cb0fe2bf17616c2039dac88d7c9a5088f5cb5829b27c9d250e053664d31", size = 1686901, upload-time = "2026-01-07T18:04:22.219Z" }, - { url = "https://files.pythonhosted.org/packages/fa/8d/15707b9669fdc517bbc552ac60da7124dafe7ac1552819b51e97ed4038b4/pymongo-4.16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cf0ec79e8ca7077f455d14d915d629385153b6a11abc0b93283ed73a8013e376", size = 1723034, upload-time = "2026-01-07T18:04:24.055Z" }, - { url = "https://files.pythonhosted.org/packages/5b/af/3d5d16ff11d447d40c1472da1b366a31c7380d7ea2922a449c7f7f495567/pymongo-4.16.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2d0082631a7510318befc2b4fdab140481eb4b9dd62d9245e042157085da2a70", size = 1797161, upload-time = "2026-01-07T18:04:25.964Z" }, - { url = "https://files.pythonhosted.org/packages/fb/04/725ab8664eeec73ec125b5a873448d80f5d8cf2750aaaf804cbc538a50a5/pymongo-4.16.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85dc2f3444c346ea019a371e321ac868a4fab513b7a55fe368f0cc78de8177cc", size = 1780938, upload-time = "2026-01-07T18:04:28.745Z" }, - { url = "https://files.pythonhosted.org/packages/22/50/dd7e9095e1ca35f93c3c844c92eb6eb0bc491caeb2c9bff3b32fe3c9b18f/pymongo-4.16.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dabbf3c14de75a20cc3c30bf0c6527157224a93dfb605838eabb1a2ee3be008d", size = 1714342, upload-time = "2026-01-07T18:04:30.331Z" }, - { url = "https://files.pythonhosted.org/packages/03/c9/542776987d5c31ae8e93e92680ea2b6e5a2295f398b25756234cabf38a39/pymongo-4.16.0-cp312-cp312-win32.whl", hash = 
"sha256:60307bb91e0ab44e560fe3a211087748b2b5f3e31f403baf41f5b7b0a70bd104", size = 887868, upload-time = "2026-01-07T18:04:32.124Z" }, - { url = "https://files.pythonhosted.org/packages/2e/d4/b4045a7ccc5680fb496d01edf749c7a9367cc8762fbdf7516cf807ef679b/pymongo-4.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:f513b2c6c0d5c491f478422f6b5b5c27ac1af06a54c93ef8631806f7231bd92e", size = 907554, upload-time = "2026-01-07T18:04:33.685Z" }, - { url = "https://files.pythonhosted.org/packages/60/4c/33f75713d50d5247f2258405142c0318ff32c6f8976171c4fcae87a9dbdf/pymongo-4.16.0-cp312-cp312-win_arm64.whl", hash = "sha256:dfc320f08ea9a7ec5b2403dc4e8150636f0d6150f4b9792faaae539c88e7db3b", size = 892971, upload-time = "2026-01-07T18:04:35.594Z" }, - { url = "https://files.pythonhosted.org/packages/47/84/148d8b5da8260f4679d6665196ae04ab14ffdf06f5fe670b0ab11942951f/pymongo-4.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d15f060bc6d0964a8bb70aba8f0cb6d11ae99715438f640cff11bbcf172eb0e8", size = 972009, upload-time = "2026-01-07T18:04:38.303Z" }, - { url = "https://files.pythonhosted.org/packages/1e/5e/9f3a8daf583d0adaaa033a3e3e58194d2282737dc164014ff33c7a081103/pymongo-4.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a19ea46a0fe71248965305a020bc076a163311aefbaa1d83e47d06fa30ac747", size = 971784, upload-time = "2026-01-07T18:04:39.669Z" }, - { url = "https://files.pythonhosted.org/packages/ad/f2/b6c24361fcde24946198573c0176406bfd5f7b8538335f3d939487055322/pymongo-4.16.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:311d4549d6bf1f8c61d025965aebb5ba29d1481dc6471693ab91610aaffbc0eb", size = 1947174, upload-time = "2026-01-07T18:04:41.368Z" }, - { url = "https://files.pythonhosted.org/packages/47/1a/8634192f98cf740b3d174e1018dd0350018607d5bd8ac35a666dc49c732b/pymongo-4.16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:46ffb728d92dd5b09fc034ed91acf5595657c7ca17d4cf3751322cd554153c17", size = 1991727, upload-time = "2026-01-07T18:04:42.965Z" }, - { url = "https://files.pythonhosted.org/packages/5a/2f/0c47ac84572b28e23028a23a3798a1f725e1c23b0cf1c1424678d16aff42/pymongo-4.16.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:acda193f440dd88c2023cb00aa8bd7b93a9df59978306d14d87a8b12fe426b05", size = 2082497, upload-time = "2026-01-07T18:04:44.652Z" }, - { url = "https://files.pythonhosted.org/packages/ba/57/9f46ef9c862b2f0cf5ce798f3541c201c574128d31ded407ba4b3918d7b6/pymongo-4.16.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5d9fdb386cf958e6ef6ff537d6149be7edb76c3268cd6833e6c36aa447e4443f", size = 2064947, upload-time = "2026-01-07T18:04:46.228Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/5421c0998f38e32288100a07f6cb2f5f9f352522157c901910cb2927e211/pymongo-4.16.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91899dd7fb9a8c50f09c3c1cf0cb73bfbe2737f511f641f19b9650deb61c00ca", size = 1980478, upload-time = "2026-01-07T18:04:48.017Z" }, - { url = "https://files.pythonhosted.org/packages/92/93/bfc448d025e12313a937d6e1e0101b50cc9751636b4b170e600fe3203063/pymongo-4.16.0-cp313-cp313-win32.whl", hash = "sha256:2cd60cd1e05de7f01927f8e25ca26b3ea2c09de8723241e5d3bcfdc70eaff76b", size = 934672, upload-time = "2026-01-07T18:04:49.538Z" }, - { url = "https://files.pythonhosted.org/packages/96/10/12710a5e01218d50c3dd165fd72c5ed2699285f77348a3b1a119a191d826/pymongo-4.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3ead8a0050c53eaa55935895d6919d393d0328ec24b2b9115bdbe881aa222673", size = 959237, upload-time = "2026-01-07T18:04:51.382Z" }, - { url = "https://files.pythonhosted.org/packages/0c/56/d288bcd1d05bc17ec69df1d0b1d67bc710c7c5dbef86033a5a4d2e2b08e6/pymongo-4.16.0-cp313-cp313-win_arm64.whl", hash = 
"sha256:dbbc5b254c36c37d10abb50e899bc3939bbb7ab1e7c659614409af99bd3e7675", size = 940909, upload-time = "2026-01-07T18:04:52.904Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/ca/64/50be6fbac9c79fe2e4c17401a467da2d8764d82833d83cec325afe5cab32/pymongo-4.17.0.tar.gz", hash = "sha256:70ffa08ba641468cc068cf46c06b34f01a8ce3489f6411309fcb5ceabe6b2fc0", size = 2523370, upload-time = "2026-04-20T16:39:53.524Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/77/28ebbf69772a4341d530831c7a006cdb06877ac23075cb53b0a227df4fe1/pymongo-4.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47b021363cd923ace5edc7a1d63c0ff8a6d9d43859b8a1ba23645f5afae63221", size = 819234, upload-time = "2026-04-20T16:37:20.888Z" }, + { url = "https://files.pythonhosted.org/packages/88/cf/5a70cee503ff9a2fea20607607f14d189f4d975960ac0945ec306ee7b695/pymongo-4.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:422fa50d7d7f5c22ea0953554396c9ef95684a2d775f860bd75a7b510538dfca", size = 819969, upload-time = "2026-04-20T16:37:24.187Z" }, + { url = "https://files.pythonhosted.org/packages/23/d5/07b7e27e662c58d872efd104a0e8055eb6569aa1b6d4da436f3fdee7f897/pymongo-4.17.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:addd0498ebbdc6354227f6ed457ed9fce442d48a3bb30d5b5bad33e104996561", size = 1244510, upload-time = "2026-04-20T16:37:26.069Z" }, + { url = "https://files.pythonhosted.org/packages/fb/be/7cac5b1e89bd5a8e395067648241390321593a7c29243e36f91343c02a90/pymongo-4.17.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5c8e180cb2cabe37300e1e36c60aa4f2ff956cc579f0142135a5d2cba252243", size = 1263245, upload-time = "2026-04-20T16:37:28.003Z" }, + { url = "https://files.pythonhosted.org/packages/2e/20/40e8e99824c1fda18261411e65ce3b0cd3d9a6ed3c056cdd0a569adc870b/pymongo-4.17.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:bd835cdb37a1adec359dd072c24f8bb14809e2644fde86fab4ee2fc9719b9483", size = 1304113, upload-time = "2026-04-20T16:37:30.048Z" }, + { url = "https://files.pythonhosted.org/packages/3a/94/fb7e25441dd66f2069a9b172380849b0eaa5881c18b3db217bf64a6d393c/pymongo-4.17.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c4979e7e8887862bbb44d203f00cc8263a3f27237876fa691b6beba23e40e6d8", size = 1297046, upload-time = "2026-04-20T16:37:32.054Z" }, + { url = "https://files.pythonhosted.org/packages/4f/c9/7352e0c20fe772541556e4d283c05e07ec48f8b0d2737ad930ac4a1b6655/pymongo-4.17.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:77aa4bc164b4de60d5db193b322f0f5b6ead716e831031bfdef8e8bd92205556", size = 1265708, upload-time = "2026-04-20T16:37:33.934Z" }, + { url = "https://files.pythonhosted.org/packages/8d/e4/3df15494c2015ed297958517f0e4f6493e21b00990748068a973e66d45e0/pymongo-4.17.0-cp310-cp310-win32.whl", hash = "sha256:48bbc576677b50af043df870d84ded67cc3a9b4aa7553201beef4da5dc050a0a", size = 805533, upload-time = "2026-04-20T16:37:35.744Z" }, + { url = "https://files.pythonhosted.org/packages/22/fa/b4e71bb8cb82ad7d21bb4e8c476f2d573ba68b20368aac36ef06e4a196b4/pymongo-4.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46767f28dea610e02edf6c5d956ce615c3c7790ea396660b9b1efd5c5ead2e0", size = 815677, upload-time = "2026-04-20T16:37:37.808Z" }, + { url = "https://files.pythonhosted.org/packages/22/e2/0a4bba644f1cda3970ea1012149eeae3594ebfeed3f81fdaf32b61d90c95/pymongo-4.17.0-cp310-cp310-win_arm64.whl", hash = "sha256:757f2a4c0c2c46cab87df0333681ce69e86c9d5b45bc5203ceba5410b3489e59", size = 807293, upload-time = "2026-04-20T16:37:39.707Z" }, + { url = "https://files.pythonhosted.org/packages/c4/e2/336d86f221cf1b56b2ed9330d4a3b98f9f38f0b37829ae9a9184617d5419/pymongo-4.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:4141e6c6a339789b2974efa00ecd9409101672d77a0e3ee2cc3839eedf8ec4df", size = 874668, upload-time = "2026-04-20T16:37:41.39Z" }, + { url = "https://files.pythonhosted.org/packages/34/8e/75d3c6c935d187ab59c61e9c15d9aab3f274b563eaf1706e8cae5f508dec/pymongo-4.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e68c76b84e0c132d9dbf9307f12ff8185702328187a87b9aca8c941303873433", size = 875294, upload-time = "2026-04-20T16:37:43.432Z" }, + { url = "https://files.pythonhosted.org/packages/5f/ec/62e855744489dbcd54fd778aae4d80fa4c4819e8fb228ca0cf6f21a03997/pymongo-4.17.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ba2195d4f386f839a52a23ea1cfd60ffaaba78a3d7841db51b7e433001139918", size = 1496233, upload-time = "2026-04-20T16:37:45.518Z" }, + { url = "https://files.pythonhosted.org/packages/82/e8/93e4e5e5ce8fdf8929dabeefe24aafa5ce046028eed0dfa8eeb936e72c49/pymongo-4.17.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8446ff4bfcb6ec2a2e50998c860986a1e992136f998b7f53e7a717fb8aa5a0b9", size = 1522927, upload-time = "2026-04-20T16:37:47.492Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ca/425dc1d21e0f17bdea0072fc463f662f7fa06d2852af52975c9eced3c07c/pymongo-4.17.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2a0d5ac205728c86e0a02192f1aa5f865b0d7d51f8df6101c01a69a7fc620d72", size = 1583468, upload-time = "2026-04-20T16:37:49.221Z" }, + { url = "https://files.pythonhosted.org/packages/b3/9d/f08b07eeffda1a43c1759f0fa625e88ae12360996eb56d42aad832fa7dff/pymongo-4.17.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:485c8a8eaa4c739f00a331fc73757898ee7c092c214a79e63866ff76aaf282ff", size = 1572787, upload-time = "2026-04-20T16:37:51.061Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/c2/6855a07aafa7b894929af23675b6fb9634800ce43122b76a62f6eeb8da2a/pymongo-4.17.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b2dfcc795f5b9fedbe179a11fdf6051581479d196582a3fe819a92a00e9b9969", size = 1526184, upload-time = "2026-04-20T16:37:53.358Z" }, + { url = "https://files.pythonhosted.org/packages/4e/05/c952bac7db71c1942ea3559fcd308b49754cc5004b455935fb4000d1f37b/pymongo-4.17.0-cp311-cp311-win32.whl", hash = "sha256:c2292144505fb12156b981bd440f3dc994a883da06ac726c0c8692ccdbc1c510", size = 852621, upload-time = "2026-04-20T16:37:55.28Z" }, + { url = "https://files.pythonhosted.org/packages/11/c0/c04da9f4c0c6252404598f4e394b862a58a9e866822a70ae261c8a018fdf/pymongo-4.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:2e190827834fce70ecdf9d46796c6dbc0ce08ea87dc2ff5bc6f3f5579b605cb9", size = 867852, upload-time = "2026-04-20T16:37:57.233Z" }, + { url = "https://files.pythonhosted.org/packages/1d/b2/c7b4870fbeef471e947d3e014676f5910d02e0197074d692ebcf24ec049a/pymongo-4.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:a8f9c40a09bb7d4b9fc8b1da65ecf6efa79bda5cb2756f39d9b6940fac1d19ae", size = 855019, upload-time = "2026-04-20T16:37:58.983Z" }, + { url = "https://files.pythonhosted.org/packages/98/90/60bcb508840135d5ee46b51b1a950f548338aa8145a8366dbe6639ae51ac/pymongo-4.17.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53ffa94b2340dbf6b055e09a0090618c60482c158ecfc9565642fc996bf0944", size = 930529, upload-time = "2026-04-20T16:38:00.936Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e9/313840f1e52c6dfac47f704428cbfbce59956ebe7633bffc92b03f74f0ad/pymongo-4.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6fe0de9d0f6791abce3471230b32b4817bf89d27b1182b6a550e1ec0fa72aa9a", size = 930665, upload-time = "2026-04-20T16:38:02.915Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/35/9d3565ea45b1606f635c1e2cd2563c28d66caafdc50f7ad7d979fcd1b363/pymongo-4.17.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e537e95514dae1aaa718f481ec03151a0f0394bcd05f1322896d8fc1330cb729", size = 1762369, upload-time = "2026-04-20T16:38:05.375Z" }, + { url = "https://files.pythonhosted.org/packages/95/ee/149b0d4b1a11c38bff6f14c23d5814c9b0843fd6dc38ad40596bdb1a62d2/pymongo-4.17.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:37a8385c29881b43eab31f584100fa0eaddedd5607adf010147ba1810118be90", size = 1798044, upload-time = "2026-04-20T16:38:07.195Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d4/4cee4a7b8d8f6f0550ef6cd2fea42455c5ed619a220cb6ba4fb40d6a5bc8/pymongo-4.17.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f3ee3d241ed77a4fc99ce3cff3b289c3ebce37f61fdd7349d3592c23b82c8784", size = 1878567, upload-time = "2026-04-20T16:38:09.121Z" }, + { url = "https://files.pythonhosted.org/packages/45/ef/7fe366c84952619ee2f69973566c214775e083dd4df465751912153e4b72/pymongo-4.17.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9eb5d63a3c518cb0804ed678f5e2b875af032d89a7cf57a57360322cf6a4d222", size = 1864881, upload-time = "2026-04-20T16:38:10.896Z" }, + { url = "https://files.pythonhosted.org/packages/2f/35/b577d82c6d1be7aee7ac7e249bc86f7847998345042e5f8360de238e177b/pymongo-4.17.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e97e03fa13327c87e3fdc5656acd01e71817f0c1dc3221cd8f30de136bf4ec3", size = 1800349, upload-time = "2026-04-20T16:38:13.589Z" }, + { url = "https://files.pythonhosted.org/packages/b8/69/dafcf04f66e130ddd91aeb92e7a692480eda46dcd04ec1dbe82c06619e10/pymongo-4.17.0-cp312-cp312-win32.whl", hash = 
"sha256:6877214bff5f06f6884a9fc8d9016a4a7a5f51f537f5c51ac3a576f93e7dfb32", size = 900518, upload-time = "2026-04-20T16:38:15.541Z" }, + { url = "https://files.pythonhosted.org/packages/11/35/5c9262a459f988b4eb2605f70815240b77a0d4131136c4326d18f1822b89/pymongo-4.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:9828485f72f63c7d802e0ec41f71906f633c2692621ab3af55ca990186b091b1", size = 920335, upload-time = "2026-04-20T16:38:17.665Z" }, + { url = "https://files.pythonhosted.org/packages/8d/da/e9c7265ee176faccf4e52c4797837e794d93569a1046f6b19a4acc36e5ad/pymongo-4.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:1195370a77baf003b59b10e91ecc4706297197f0dd9d29c840cc556dc08f7cee", size = 903289, upload-time = "2026-04-20T16:38:19.33Z" }, + { url = "https://files.pythonhosted.org/packages/2a/6b/c1206879708b94e82fcd8b9653440ec271f79a3674d122192df383047f5a/pymongo-4.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:809ec74de3b9148ae43fa8df9faf53470f511c8d384f13b99d6f671f2a379f15", size = 985829, upload-time = "2026-04-20T16:38:21.031Z" }, + { url = "https://files.pythonhosted.org/packages/cb/cf/bb044ed85160e5c40f568c7c4f4e8ea16f40764ff5d302e5befbe8f6f814/pymongo-4.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a431b737816bf4cddd4fa0fcef04e424ad36b7692734a64150f872fb8f3208be", size = 985899, upload-time = "2026-04-20T16:38:23.409Z" }, + { url = "https://files.pythonhosted.org/packages/74/0a/f6dfd5ea3901e5d6888da8de8ba728971a1d447debab681cfc56f90d1208/pymongo-4.17.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e4fab10f8403169ce92f3cea921609d9ee81107306caae06c08f592d4b8ad2b5", size = 2028569, upload-time = "2026-04-20T16:38:25.343Z" }, + { url = "https://files.pythonhosted.org/packages/4a/c5/081f59a1c02ae8c0dc73ae58e563838c44eec81aeafa7d0b93a637841c9b/pymongo-4.17.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:20323b0b1c1d33770ad1fc68d429c757734ce9ad3594421c3d6618f10572b1b9", size = 2072916, upload-time = "2026-04-20T16:38:27.291Z" }, + { url = "https://files.pythonhosted.org/packages/31/42/6e41d434297ffe8b30d9c3717916591a4a7be9075a0dcc2fafdfaaaa62ed/pymongo-4.17.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5a5de048e6da5c18e27cc2437e8c15b3b0cdc8385c15b41178b0caa3322a09c2", size = 2173234, upload-time = "2026-04-20T16:38:29.474Z" }, + { url = "https://files.pythonhosted.org/packages/3d/cf/1e4a7db352ef9485831c7268dfe8402f0117b32a9ad54b16e810699e3617/pymongo-4.17.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dff3de1294fbbc1db0ba6b511f77b8e540601d092538a31312e99c8a91a78b1e", size = 2156784, upload-time = "2026-04-20T16:38:32.134Z" }, + { url = "https://files.pythonhosted.org/packages/12/10/6195be29962a61ebb5f4bd9e4c7519890b172f7968a0a0d880398c6ddb02/pymongo-4.17.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:faf03e4c2aafd6de626dbd30ba246d369ae33f47f10629d1bbe40f72115027a6", size = 2074446, upload-time = "2026-04-20T16:38:34.004Z" }, + { url = "https://files.pythonhosted.org/packages/37/48/33410b8819837ed370c738587306bdf060b59cef11823be212f4a07703c5/pymongo-4.17.0-cp313-cp313-win32.whl", hash = "sha256:c9786665926a09630c5d420c79762cfadbff35a9438bcbc4c81a9fb5ab9228b7", size = 948435, upload-time = "2026-04-20T16:38:35.922Z" }, + { url = "https://files.pythonhosted.org/packages/6f/77/c0ed522f798a286b99acaa7914ed8d9c80ab091f97f57c59ffed72906e5e/pymongo-4.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:5960519b4d7168f1ecdd3ea10c81b2aedeb9423651aca953cfbc8e76705d3b38", size = 972847, upload-time = "2026-04-20T16:38:37.888Z" }, + { url = "https://files.pythonhosted.org/packages/97/f0/c39480a2db385fde23861d0c8acda41cdaf1d43e46579db72c5c013a2e81/pymongo-4.17.0-cp313-cp313-win_arm64.whl", hash = 
"sha256:0ff6bd2f735ab5356541e3e57d5b7dbfbc3f2ee1ccb10b6b0f82d58af69d1d8e", size = 951575, upload-time = "2026-04-20T16:38:40.544Z" }, ] [[package]] @@ -7047,31 +6983,31 @@ wheels = [ [[package]] name = "pypdfium2" -version = "5.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/76/19aacfff78d328a700ca34b5b1dff891e587aac2fd6b928b035ed366cc37/pypdfium2-5.7.0.tar.gz", hash = "sha256:9febb09f532555485f064c1f6442f46d31e27be5981359cb06b5826695906a06", size = 265935, upload-time = "2026-04-08T19:58:16.831Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/81/a5/7e6d9532e7753a1dc439412b38dda5943c692d3ab3f1e01826f9b5527c67/pypdfium2-5.7.0-py3-none-android_23_arm64_v8a.whl", hash = "sha256:9e815e75498a03a3049baf68ff00b90459bead0d9eee65b1860142529faba81d", size = 3343748, upload-time = "2026-04-08T19:57:40.293Z" }, - { url = "https://files.pythonhosted.org/packages/d3/ea/9d4a0b41f86d342dfb6529c31789e70d1123cc6521b29979e02ec2b267b6/pypdfium2-5.7.0-py3-none-android_23_armeabi_v7a.whl", hash = "sha256:405bb3c6d0e7a5a32e98eb45a3343da1ad847d6d6eef77bf6f285652a250e0b7", size = 2805480, upload-time = "2026-04-08T19:57:42.109Z" }, - { url = "https://files.pythonhosted.org/packages/34/dc/ce1c8e94082a84d1669606f90c4f694acbdcabd359d92db7302d16b5938b/pypdfium2-5.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:609b34d91871c185f399b1a503513c03a9de83597f55404de00c3d31a8037544", size = 3420156, upload-time = "2026-04-08T19:57:43.672Z" }, - { url = "https://files.pythonhosted.org/packages/51/84/6d859ce82a3723ba7cd70d88ad87eca3cb40553c68db182976fd2b0febe1/pypdfium2-5.7.0-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:6ae6c6bba0cde30c9293c3f525778c229466de7782e8f7d99e7c2a1b8f9c7a6f", size = 3601560, upload-time = "2026-04-08T19:57:45.148Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/0c/8bc2258d1e7ba971d05241a049cd3100c75df6bcf930423de7d0c6265a30/pypdfium2-5.7.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b518d78211cb2912139d10d7f4e39669231eb155e8258159e3413e9e5e4baef", size = 3588134, upload-time = "2026-04-08T19:57:47.379Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f7/3248cc569a92ff25f1fe0a4a1790807e6e05df60563e39e74c9b723d5620/pypdfium2-5.7.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aaa8e7681ebcaa042ac8adc152521fd5f16a4ceee1e9b9b582e148519528aa9", size = 3323100, upload-time = "2026-04-08T19:57:49.243Z" }, - { url = "https://files.pythonhosted.org/packages/0d/ee/6f004509df77ce963ed5a0f2e090ea0c43036e49cc72c321ce90f3d328bf/pypdfium2-5.7.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8d2284f799adbae755b66ce1a579834e487337d89bbb34ee749ecfa68322425", size = 3719217, upload-time = "2026-04-08T19:57:50.708Z" }, - { url = "https://files.pythonhosted.org/packages/ae/f0/bb61601aa1c2990d4a5d194440281941781250f6a438813a13fe20eb95cf/pypdfium2-5.7.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08e9e9576eefbc085ba9a63feede4bcaf93d9fa0d9b17cb549aba6f065a8750e", size = 4147676, upload-time = "2026-04-08T19:57:52.292Z" }, - { url = "https://files.pythonhosted.org/packages/bd/27/a119e0519049afcfca51e9834b67949ffaba5b9afe7e74ed04d6c39b0285/pypdfium2-5.7.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ace647320bae562903097977b83449f91d30e045dd19ce62939d3100869f180", size = 3635469, upload-time = "2026-04-08T19:57:53.948Z" }, - { url = "https://files.pythonhosted.org/packages/70/0b/4bcb67b039f057aca01ddbe692ae7666b630ad42b91a3aca3cb4d4f01222/pypdfium2-5.7.0-py3-none-manylinux_2_27_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7bb7555fe613cd76fff871a12299f902b80443f90b49e2001338718c758f6f4", size = 3091818, upload-time = "2026-04-08T19:57:55.471Z" }, - { 
url = "https://files.pythonhosted.org/packages/a6/c9/31490ab7cecaf433195683ff5c750f4111c7347f1fef9131d3d8704618eb/pypdfium2-5.7.0-py3-none-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e7c0ef5ae35d40daa1883f3993b3b7ecf3fb06993bcc46651e28cf058d9da992", size = 2959579, upload-time = "2026-04-08T19:57:57.238Z" }, - { url = "https://files.pythonhosted.org/packages/f9/1e/bf5fe52f007130c0b1b38786ef82c98b4ac06f77e7ca001a17cda6ce76b6/pypdfium2-5.7.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:423c749e8cab22ddaf833041498ec5ad477c1c2abbff0a8ec00b99663c284592", size = 4126033, upload-time = "2026-04-08T19:57:59.111Z" }, - { url = "https://files.pythonhosted.org/packages/18/7d/46dcebf4eb9ccf9b5fafe79702c31863b4c127e9c3140c0f335c375d3818/pypdfium2-5.7.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f48f453f848a90ec7786bcc84a4c0ee42eb84c2d8af3ca9004f7c18648939838", size = 3742063, upload-time = "2026-04-08T19:58:00.643Z" }, - { url = "https://files.pythonhosted.org/packages/4d/29/cfec37942f13a1dfe3ab059cf8d130609143d33ca1dd554b017a30bffe97/pypdfium2-5.7.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e84bfa61f0243ed4b33bfe2492946ba761007b7feb5e7e0a086c635436d47906", size = 4332177, upload-time = "2026-04-08T19:58:02.425Z" }, - { url = "https://files.pythonhosted.org/packages/3f/da/07812153eff746bbc548d50129ada699765036674ff94065d538015c9556/pypdfium2-5.7.0-py3-none-musllinux_1_2_ppc64le.whl", hash = "sha256:e3f4d7f4473b5ef762560cd5971cad3b51a77da3a25af479ef5aae4611709bb8", size = 4370704, upload-time = "2026-04-08T19:58:04.379Z" }, - { url = "https://files.pythonhosted.org/packages/9b/df/07a6a038ccb6fae6a1a06708c98d00aa03f2ca720b02cd3b75248dc5da70/pypdfium2-5.7.0-py3-none-musllinux_1_2_riscv64.whl", hash = "sha256:9e0b6c9be8c92b63ce0a00a94f6635eec22831e253811d6692824a1244e21780", size = 3924428, upload-time = "2026-04-08T19:58:06.406Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/a8/70ce4f997fef4186098c032fb3dd2c39193027a92a23b5a94d7a4c85e068/pypdfium2-5.7.0-py3-none-musllinux_1_2_s390x.whl", hash = "sha256:3e4974a8545f726fc97a7443507713007e177f22058cd1ca0b28cb0e8e2d7dc2", size = 4264817, upload-time = "2026-04-08T19:58:08.003Z" }, - { url = "https://files.pythonhosted.org/packages/02/42/03779e61ca40120f87839b4693899c72031b7a9e23676dcd8914d92e460c/pypdfium2-5.7.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2fe12d57a0b413d42bdba435a608b2435a921a5f6a9d78fd8091b6266b63901a", size = 4175393, upload-time = "2026-04-08T19:58:09.858Z" }, - { url = "https://files.pythonhosted.org/packages/ee/f1/19bea36b354f2407c6ffdc60ad8564d95eb515badec457043ff57ad636f0/pypdfium2-5.7.0-py3-none-win32.whl", hash = "sha256:23958aec5c28c52e71f183a647fcc9fcec96ef703cc60a3ade44e55f4701678f", size = 3606308, upload-time = "2026-04-08T19:58:11.672Z" }, - { url = "https://files.pythonhosted.org/packages/70/aa/fb333c1912a019de26e2395afd3dbef09e8118a59d70f1e5886fc90aa565/pypdfium2-5.7.0-py3-none-win_amd64.whl", hash = "sha256:a33d2c190042ae09c5512f599a540f88b07be956f18c4bb49c027e8c5118ce44", size = 3726429, upload-time = "2026-04-08T19:58:13.374Z" }, - { url = "https://files.pythonhosted.org/packages/86/cf/6d4bc1ae4466a1f223abfe27210dce218da307e921961cd687f6e5a795a0/pypdfium2-5.7.0-py3-none-win_arm64.whl", hash = "sha256:8233fd06b0b8c22a5ea0bccbd7c4f73d6e9d0388040ea51909a5b2b1f63157e8", size = 3519317, upload-time = "2026-04-08T19:58:15.261Z" }, +version = "5.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/13/ee794b8a810b7226426c8b50d6c28637c059e7da0caf9936164f352ef858/pypdfium2-5.7.1.tar.gz", hash = "sha256:3b3b20a56048dbe3fd4bf397f9bec854c834668bc47ef6a7d9041b23bb04317b", size = 266791, upload-time = "2026-04-20T15:01:02.598Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d6/f7/e87ba0eec9cd4e9eedd4bbb867515da970525ca8c105dd5e254758216ee3/pypdfium2-5.7.1-py3-none-android_23_arm64_v8a.whl", hash = "sha256:8008f45e8adc4fc1ec2a51e018e01cd0692d4859bdbb28e88be221804f329468", size = 3367033, upload-time = "2026-04-20T15:00:22.847Z" }, + { url = "https://files.pythonhosted.org/packages/f6/e1/a4b9be9a09fa9857958357ced51afb25518f6a48e4e68fdc9a091f0f2259/pypdfium2-5.7.1-py3-none-android_23_armeabi_v7a.whl", hash = "sha256:892fcb5a618f5f551fffdb968ac2d64911953c3ba0f9aa628239705af68dbe15", size = 2824449, upload-time = "2026-04-20T15:00:24.913Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5d/c91abb2610316a1622f86ddf706fcd04d34c7e6923c3fa8fa145c8f7a372/pypdfium2-5.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7431847d45dedc3c7ffede15b58ac611e996a0cdcd61318a0190d46b9980ac2b", size = 3443730, upload-time = "2026-04-20T15:00:26.664Z" }, + { url = "https://files.pythonhosted.org/packages/50/8b/b9eefed83d6a0a59384ee64d25c1515e831c234c3ed6b8c6dfc8f99f4875/pypdfium2-5.7.1-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:548bd09c9f97565ae8ddba30bb65823cbf791b84e4cdb63ed582aec2c289dbe2", size = 3626483, upload-time = "2026-04-20T15:00:28.629Z" }, + { url = "https://files.pythonhosted.org/packages/5b/98/6d62723e1f58d66e7e0073c4f12048f9d5dcd478369da0990db08e677dd5/pypdfium2-5.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18a15ad0918acc3ea98778394f0331b9ad2a1b7384ab3d8d8c63422ffd01ed13", size = 3610098, upload-time = "2026-04-20T15:00:30.344Z" }, + { url = "https://files.pythonhosted.org/packages/0b/4a/f72b42578f30971c29915e33ee598ed451aa6f0c2808a71526c1b81afd8d/pypdfium2-5.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1df04564659d807fb38810d9bd1ac18419d8acbb5f87f2cb20675d7332635b18", size = 3340119, upload-time = "2026-04-20T15:00:32.19Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/64/de69c5feed470617f243e61cac841bfd1b5273d575c3d3b49b27f738e334/pypdfium2-5.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a146d036a6b085a406aa256548b827b63016714fd77f8e11b7f704c1175e8cc", size = 3738864, upload-time = "2026-04-20T15:00:33.798Z" }, + { url = "https://files.pythonhosted.org/packages/07/ce/69ff10766565c5ffcb66cebe780ce3bc4fe7cc16b218df8c240075881c66/pypdfium2-5.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3397b0d705b6858c87dec1dc9c44d4c7094601a9b231097f441b64d1a7d5ff0b", size = 4169839, upload-time = "2026-04-20T15:00:35.973Z" }, + { url = "https://files.pythonhosted.org/packages/03/4b/fff16a831a6f07aad02da0d02b620c455310b8bf4e2642909175dcb7ccae/pypdfium2-5.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc2cdf603ac766b91b7c1b455197ec1c3471089d75f999b046edb65ed6cedd80", size = 3657630, upload-time = "2026-04-20T15:00:38.407Z" }, + { url = "https://files.pythonhosted.org/packages/9b/58/d3148917616164cfad347b0b509342737ed80e060afab07523ffeac2a05f/pypdfium2-5.7.1-py3-none-manylinux_2_27_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b1a6a5f3320b59138e7570a3f78840540383d058ac180a9a21f924ad3bd7f83", size = 3088898, upload-time = "2026-04-20T15:00:40.109Z" }, + { url = "https://files.pythonhosted.org/packages/e4/1d/387ca4dfe9865a8d61114dae2debba4d86eed07cdc6a31c5527a049583be/pypdfium2-5.7.1-py3-none-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:91b809c40a5fc248107d13fbcf1dd2c64dbc8e572693a9b93e350bf31efda92b", size = 2955404, upload-time = "2026-04-20T15:00:41.921Z" }, + { url = "https://files.pythonhosted.org/packages/ad/87/4afc2bfe35d71942f1bf9e774086f74af66a0a4e56338f39a7cbc5b8721c/pypdfium2-5.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85611ef61cbc0f5e04de8f99fec0f3db3920b09f46c62afa08c9caa21a74b353", size = 4126600, upload-time = "2026-04-20T15:00:44.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/c4/872eef4cb8f0d8ebbf967ca713254ac71c75878a1d5798bc2b8d23104e52/pypdfium2-5.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b2764ab909f9b444d4e643be90b064c4053e6828c28bfd47639fc84526ba244d", size = 3742636, upload-time = "2026-04-20T15:00:46.009Z" }, + { url = "https://files.pythonhosted.org/packages/10/6d/3805a53623a72e20b68e6814b37582994298b231628656ff227fa1158a1f/pypdfium2-5.7.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:fcea3cc20b7cca7d84ceee68b9c6ef7fe773fb71c145542769dc2ceb27e9698a", size = 4332743, upload-time = "2026-04-20T15:00:47.829Z" }, + { url = "https://files.pythonhosted.org/packages/92/61/3e3f8ae7ad04400bc3c6a75bbf59db500eaf9dff05477d1b25ff4a36363b/pypdfium2-5.7.1-py3-none-musllinux_1_2_ppc64le.whl", hash = "sha256:f04546bc314973397148805d44f8e660e81aa80c2a87e12afb892c11493ded6c", size = 4377471, upload-time = "2026-04-20T15:00:49.443Z" }, + { url = "https://files.pythonhosted.org/packages/8d/e0/1026f297b5be292cae7095aa4814d57faa3faba0b49552afcaa11a1c2e4e/pypdfium2-5.7.1-py3-none-musllinux_1_2_riscv64.whl", hash = "sha256:66275c8a854969bdf905abc7599e5623d62739c44604d69788ff5457082d275b", size = 3919215, upload-time = "2026-04-20T15:00:51.2Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5d/7d6d5b392fa42a997aadf127e3b2c25739199141054b33f759ba5d02e653/pypdfium2-5.7.1-py3-none-musllinux_1_2_s390x.whl", hash = "sha256:bbed8f32040ce3b3236a512265976017c2465ea6643a1730f008b39e0339b8ce", size = 4263089, upload-time = "2026-04-20T15:00:53.105Z" }, + { url = "https://files.pythonhosted.org/packages/2e/b8/d51bd4a1d426fa5b99d4516c77cc1892a8fbfd5a93a823e2679cf9b09ee0/pypdfium2-5.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c55d3df09bd0d72a1d192107dcbf80bcb2791662a3eca3b084001f947d3040d5", size = 4175967, upload-time = "2026-04-20T15:00:54.757Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/52/06a6358856374ae4400ee1ad0ddaa01d5c31fcd6e8f4577e6a3ed1c40343/pypdfium2-5.7.1-py3-none-win32.whl", hash = "sha256:4f6bbe1211c5883c8fc9ce11008347e5b96ec6571456d959ae289cecdb2867f0", size = 3629154, upload-time = "2026-04-20T15:00:56.916Z" }, + { url = "https://files.pythonhosted.org/packages/6f/13/e0dbc9377d976d8b03ed0dd07fe9892e06d09fcf4f6a0e66df49366227d7/pypdfium2-5.7.1-py3-none-win_amd64.whl", hash = "sha256:fdf117af26bd310f4f176b3cf0e2e23f0f800e48dcf2bcf6c2cca0de3326f5cb", size = 3747295, upload-time = "2026-04-20T15:00:59.15Z" }, + { url = "https://files.pythonhosted.org/packages/bc/67/4759522f5bca0ac4cda9f42c7f3f818aa826568793bd8b4532d2d2ffa515/pypdfium2-5.7.1-py3-none-win_arm64.whl", hash = "sha256:622821698fcc30fc560bd4eead6df9e6b846de9876b82861bed0091c09a4c27b", size = 3540903, upload-time = "2026-04-20T15:01:00.994Z" }, ] [[package]] @@ -7278,11 +7214,11 @@ wheels = [ [[package]] name = "python-iso639" -version = "2026.1.31" +version = "2026.4.20" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a3/da/701fc47ea3b0579a8ae489d50d5b54f2ef3aeb7768afd31db1d1cfe9f24e/python_iso639-2026.1.31.tar.gz", hash = "sha256:55a1612c15e5fbd3a1fa269a309cbf1e7c13019356e3d6f75bb435ed44c45ddb", size = 174144, upload-time = "2026-01-31T15:04:48.105Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/c8/22c80367213029ea3abc4e7ab6e1ed8545542f98e5db6e1ab4f2973890ad/python_iso639-2026.4.20.tar.gz", hash = "sha256:00570376d24788f889578991bb2ad93c030a014c1d373f64f2ceffe84732a537", size = 173955, upload-time = "2026-04-20T14:15:47.748Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/3a/03ee682b04099e6b02b591955851b0347deb2e3691ae850112000c54ba12/python_iso639-2026.1.31-py3-none-any.whl", hash = "sha256:b2c48fa1300af1299dff4f1e1995ad1059996ed9f22270ea2d6d6bdc5fb03d4c", size = 167757, upload-time = "2026-01-31T15:04:46.458Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/71/520fbac49c0650aba66093396282e1e4a1315a7242461c21480132a1b0df/python_iso639-2026.4.20-py3-none-any.whl", hash = "sha256:60a380571fafdbcc6190c5c1ee78e217194332cbe3caec76345327712e5a65cb", size = 167842, upload-time = "2026-04-20T14:15:46.308Z" }, ] [[package]] @@ -7420,8 +7356,7 @@ name = "qdrant-client" version = "1.14.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "grpcio", version = "1.78.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "grpcio", version = "1.80.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, + { name = "grpcio" }, { name = "httpx", extra = ["http2"] }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, @@ -7867,14 +7802,14 @@ wheels = [ [[package]] name = "s3transfer" -version = "0.16.0" +version = "0.16.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827, upload-time = "2025-12-01T02:30:59.114Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/29/af14f4ef3c11a50435308660e2cc68761c9a7742475e0585cd4396b91777/s3transfer-0.16.1.tar.gz", hash = "sha256:8e424355754b9ccb32467bdc568edf55be82692ef2002d934b1311dbb3b9e524", size = 154801, upload-time = "2026-04-22T20:36:06.475Z" } wheels = [ - { 
url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" }, + { url = "https://files.pythonhosted.org/packages/03/19/90d7d4ed51932c022d53f1d02d564b62d10e272692a1f9b76425c1ad2a02/s3transfer-0.16.1-py3-none-any.whl", hash = "sha256:61bcd00ccb83b21a0fe7e91a553fff9729d46c83b4e0106e7c314a733891f7c2", size = 86825, upload-time = "2026-04-22T20:36:04.992Z" }, ] [[package]] @@ -8032,24 +7967,49 @@ wheels = [ [[package]] name = "scrapegraph-py" -version = "1.46.0" +version = "1.47.0" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.11.*' and platform_machine != 's390x'", + "python_full_version == '3.11.*' and platform_machine == 's390x'", + "python_full_version < '3.11' and platform_machine != 's390x'", + "python_full_version < '3.11' and platform_machine == 's390x'", +] dependencies = [ - { name = "aiohttp" }, - { name = "beautifulsoup4" }, - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "requests" }, - { name = "toonify" }, + { name = "aiohttp", marker = "python_full_version < '3.12'" }, + { name = "beautifulsoup4", marker = "python_full_version < '3.12'" }, + { name = "pydantic", marker = "python_full_version < '3.12'" }, + { name = "python-dotenv", marker = "python_full_version < '3.12'" }, + { name = "requests", marker = "python_full_version < '3.12'" }, + { name = "toonify", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5f/b4/9196574ac53c6c94fb311824e3f6e5e13191620fb9a09b056daf0e77a19d/scrapegraph_py-1.47.0.tar.gz", hash = "sha256:4794820d9dcdba2c6ee22b4ad0975843a10adb65e4831e680f846067e13c5aa9", size = 340039, upload-time = "2026-04-18T13:50:01.084Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e4/60/fefc1221b49b3910cd91751c98f14d5a4a5ea6d0fb8e51b63e246bdd1acf/scrapegraph_py-1.47.0-py3-none-any.whl", hash = "sha256:7edb0c095a6d6280c05aee22503672350ed9dd7c289354affeee06dc3323819c", size = 50968, upload-time = "2026-04-18T13:49:59.773Z" }, +] + +[[package]] +name = "scrapegraph-py" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.13' and platform_machine != 's390x'", + "python_full_version >= '3.13' and platform_machine == 's390x'", + "python_full_version == '3.12.*' and platform_machine != 's390x'", + "python_full_version == '3.12.*' and platform_machine == 's390x'", +] +dependencies = [ + { name = "httpx", marker = "python_full_version >= '3.12'" }, + { name = "pydantic", marker = "python_full_version >= '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a6/3c/573fd78a01d27af4bae28134129eaf81b5dd270cb6fbd5229833298a8058/scrapegraph_py-1.46.0.tar.gz", hash = "sha256:95cab89d63b1d5809bb96ddabd3dffc53f16dc9b92dda2d642e9155c3db2806d", size = 327431, upload-time = "2026-01-26T13:59:24.237Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/4b/8b165cfb0d6b564cc69e4f58270bb54f2457c68d5c0fc648d547e3d0e207/scrapegraph_py-2.1.0.tar.gz", hash = "sha256:c4d1ed4d0c11c5c10e999d310ce1146f62809a91292b3d07d69b40c2a0954d75", size = 4876208, upload-time = "2026-04-21T12:53:48.428Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/22/21562bc98c8439df50e4b837f4110f374b504e3482df15d6a67b164b3c23/scrapegraph_py-1.46.0-py3-none-any.whl", hash = "sha256:c0cc1f73dcd25429c42a079bb541f06d101d63ac15f2f1d881b0026567bdb6c8", size = 49297, upload-time = "2026-01-26T13:59:21.607Z" }, + { url = "https://files.pythonhosted.org/packages/2e/22/1d4b1430af1c007b095c4a4bd2af062d581e87be89a36166ab318d3e5224/scrapegraph_py-2.1.0-py3-none-any.whl", hash = "sha256:9bd9a6f6c30d44d362afc873aba61fd8cf066c62fe8dd83f0eb16f3d666c66ff", size 
= 15590, upload-time = "2026-04-21T12:53:43.662Z" }, ] [[package]] name = "scrapfly-sdk" -version = "0.10.0" +version = "0.10.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff" }, @@ -8059,9 +8019,9 @@ dependencies = [ { name = "requests" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/7e/3dd57ac5b80c997fd9ee54a67b9a035eb2170a7fa8f5afa8486179401702/scrapfly_sdk-0.10.0.tar.gz", hash = "sha256:4b14a1a448b723771cbc9dba8bc07394c330028cfa77f656e9c182e7b8ab46ea", size = 105048, upload-time = "2026-04-15T17:31:10.335Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/82/cdc91c753c5c8b1bc0f17242e2dc94785e6c5191d3a18e903d1e5fef4561/scrapfly_sdk-0.10.2.tar.gz", hash = "sha256:53ae3263e365eb5f9602458d0207ab8aabf4315b0a2a439ef3e76be1156245d9", size = 111333, upload-time = "2026-04-22T16:32:20.5Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/9e/d6ebd1b3343bb966dabfe0191578db060417ce6d038c4a24ab96bf2a239f/scrapfly_sdk-0.10.0-py3-none-any.whl", hash = "sha256:26599ee9526196f531aa7e07d03bd6dfdd4172c470caf7ee0b56ce3d001d1768", size = 118828, upload-time = "2026-04-15T17:31:08.905Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4c/4d9b6d1a75a8b86ae011f74e6adc402490d9a54568662807fdf036bcaa17/scrapfly_sdk-0.10.2-py3-none-any.whl", hash = "sha256:c3ed2c1f2fce04ecdfd585be8105bd33a01e957e71a1ca81b7bb566df6106105", size = 125802, upload-time = "2026-04-22T16:32:18.859Z" }, ] [[package]] @@ -8503,7 +8463,7 @@ wheels = [ [[package]] name = "textual" -version = "8.2.3" +version = "8.2.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py", extra = ["linkify"] }, @@ -8513,9 +8473,9 @@ dependencies = [ { name = "rich" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/2f/d44f0f12b3ddb1f0b88f7775652e99c6b5a43fd733badf4ce064bdbfef4a/textual-8.2.3.tar.gz", hash = 
"sha256:beea7b86b03b03558a2224f0cc35252e60ef8b0c4353b117b2f40972902d976a", size = 1848738, upload-time = "2026-04-05T09:12:45.338Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/89/bec5709fb759f9c784bbcb30b2e3497df3f901691d13c2b864dbf6694a17/textual-8.2.4.tar.gz", hash = "sha256:d4e2b2ddd7157191d00b228592b7c739ea080b7d792fd410f23ca75f05ea76c4", size = 1848933, upload-time = "2026-04-19T04:20:45.845Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/28/a81d6ce9f4804818bd1231a9a6e4d56ea84ebbe8385c49591444f0234fa2/textual-8.2.3-py3-none-any.whl", hash = "sha256:5008ac581bebf1f6fa0520404261844a231e5715fdbddd10ca73916a3af48ca2", size = 724231, upload-time = "2026-04-05T09:12:48.747Z" }, + { url = "https://files.pythonhosted.org/packages/5c/32/02932f0d597cdbb34e34bf24266ff0f2cf292ccb3aafc37dd9efcb0cc416/textual-8.2.4-py3-none-any.whl", hash = "sha256:a83bd3f0cc7125ca203845af753f9d6b6be030025ecd1b05cc75ebe645b9c4ba", size = 724390, upload-time = "2026-04-19T04:20:49.968Z" }, ] [[package]] @@ -8650,7 +8610,7 @@ name = "toonify" version = "1.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "tiktoken" }, + { name = "tiktoken", marker = "python_full_version < '3.12'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ce/53/409a1dd7bcb52c74da019994cb866e875d0bf9020b89c7fcfcdea2866ce3/toonify-1.6.0.tar.gz", hash = "sha256:57bf6fbc9d73e463e8773c491123b233b0c79482235e0c27b908b4e58b54ec77", size = 30106, upload-time = "2026-02-06T16:00:02.622Z" } wheels = [ @@ -8748,7 +8708,7 @@ wheels = [ [[package]] name = "transformers" -version = "5.5.4" +version = "5.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, @@ -8762,9 +8722,9 @@ dependencies = [ { name = "tqdm" }, { name = "typer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a5/1e/1e244ab2ab50a863e6b52cc55761910567fa532b69a6740f6e99c5fdbd98/transformers-5.5.4.tar.gz", hash = 
"sha256:2e67cadba81fc7608cc07c4dd54f524820bc3d95b1cabd0ef3db7733c4f8b82e", size = 8227649, upload-time = "2026-04-13T16:55:55.181Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/14/e3eb58bd4c9df45af154f9de59a6e66a2ece30dd64434c710e40e5a4b1f1/transformers-5.6.0.tar.gz", hash = "sha256:291951976b79a6f93ec06d6ab14489a99aecdbc8f05aaabab538ea1d508c9a97", size = 8311711, upload-time = "2026-04-22T15:42:03.393Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/fb/162a66789c65e5afa3b051309240c26bf37fbc8fea285b4546ae747995a2/transformers-5.5.4-py3-none-any.whl", hash = "sha256:0bd6281b82966fe5a7a16f553ea517a9db1dee6284d7cb224dfd88fc0dd1c167", size = 10236696, upload-time = "2026-04-13T16:55:51.497Z" }, + { url = "https://files.pythonhosted.org/packages/e8/80/ac700df26a83969d2cf8d09d3dd191bb9bbe8156fa4607e614438b1da8c8/transformers-5.6.0-py3-none-any.whl", hash = "sha256:def5aac47b28e2f1386fa64f2f458a5474ba7733ab74c1765e7c67a79d1f1cbd", size = 10364790, upload-time = "2026-04-22T15:41:58.723Z" }, ] [[package]] @@ -8805,17 +8765,18 @@ wheels = [ [[package]] name = "tree-sitter-c" -version = "0.24.1" +version = "0.24.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/f5/ba8cd08d717277551ade8537d3aa2a94b907c6c6e0fbcf4e4d8b1c747fa3/tree_sitter_c-0.24.1.tar.gz", hash = "sha256:7d2d0cda0b8dda428c81440c1e94367f9f13548eedca3f49768bde66b1422ad6", size = 228014, upload-time = "2025-05-24T17:32:58.384Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/c9/3834f3d9278251aea7312274971bc4c45b17aec2490fd4b884d93bd7019a/tree_sitter_c-0.24.2.tar.gz", hash = "sha256:1628584df0299b5a340aa63f8e67b6c97c91517f52fa7e7a4c557e40adb330a9", size = 228397, upload-time = "2026-04-22T08:06:14.491Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/15/c7/c817be36306e457c2d36cc324789046390d9d8c555c38772429ffdb7d361/tree_sitter_c-0.24.1-cp310-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:9c06ac26a1efdcc8b26a8a6970fbc6997c4071857359e5837d4c42892d45fe1e", size = 80940, upload-time = "2025-05-24T17:32:49.967Z" }, - { url = "https://files.pythonhosted.org/packages/7a/42/283909467290b24fdbc29bb32ee20e409a19a55002b43175d66d091ca1a4/tree_sitter_c-0.24.1-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:942bcd7cbecd810dcf7ca6f8f834391ebf0771a89479646d891ba4ca2fdfdc88", size = 86304, upload-time = "2025-05-24T17:32:51.271Z" }, - { url = "https://files.pythonhosted.org/packages/94/53/fb4f61d4e5f15ec3da85774a4df8e58d3b5b73036cf167f0203b4dd9d158/tree_sitter_c-0.24.1-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a74cfd7a11ca5a961fafd4d751892ee65acae667d2818968a6f079397d8d28c", size = 109996, upload-time = "2025-05-24T17:32:52.119Z" }, - { url = "https://files.pythonhosted.org/packages/5e/e8/fc541d34ee81c386c5453c2596c1763e8e9cd7cb0725f39d7dfa2276afa4/tree_sitter_c-0.24.1-cp310-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6a807705a3978911dc7ee26a7ad36dcfacb6adfc13c190d496660ec9bd66707", size = 98137, upload-time = "2025-05-24T17:32:53.361Z" }, - { url = "https://files.pythonhosted.org/packages/32/c6/d0563319cae0d5b5780a92e2806074b24afea2a07aa4c10599b899bda3ec/tree_sitter_c-0.24.1-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:789781afcb710df34144f7e2a20cd80e325114b9119e3956c6bd1dd2d365df98", size = 94148, upload-time = "2025-05-24T17:32:54.855Z" }, - { url = "https://files.pythonhosted.org/packages/50/5a/6361df7f3fa2310c53a0d26b4702a261c332da16fa9d801e381e3a86e25f/tree_sitter_c-0.24.1-cp310-abi3-win_amd64.whl", hash = "sha256:290bff0f9c79c966496ebae45042f77543e6e4aea725f40587a8611d566231a8", size = 84703, upload-time = "2025-05-24T17:32:56.084Z" }, - { url = "https://files.pythonhosted.org/packages/22/6a/210a302e8025ac492cbaea58d3720d66b7d8034c5d747ac5e4d2d235aa25/tree_sitter_c-0.24.1-cp310-abi3-win_arm64.whl", hash = 
"sha256:d46bbda06f838c2dcb91daf767813671fd366b49ad84ff37db702129267b46e1", size = 82715, upload-time = "2025-05-24T17:32:57.248Z" }, + { url = "https://files.pythonhosted.org/packages/28/c1/26ed17730ec2c17bedc1b673349e5e0a466c578e3eb0327c3b73cf52bf97/tree_sitter_c-0.24.2-cp310-abi3-macosx_10_9_x86_64.whl", hash = "sha256:4d4579a8b54f0a442f903d88d3304cab77cd5c2031d4015baa4f2f8e15d6dcb7", size = 81016, upload-time = "2026-04-22T08:06:07.208Z" }, + { url = "https://files.pythonhosted.org/packages/c1/1c/1140db75e7e375cda3c68792a33826c4fd40b5b98c3259d93c75f6c8368f/tree_sitter_c-0.24.2-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:97bc80a224d48215d4e6e6376bf30d114f4c317b8145ff1b02afe785d4ba7bdd", size = 86213, upload-time = "2026-04-22T08:06:08.136Z" }, + { url = "https://files.pythonhosted.org/packages/e9/8c/0dfb88d726f8821d1c4c36042f092be974a800afd734307a595b8604190c/tree_sitter_c-0.24.2-cp310-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5041ef67eb68ce6bc8bb0b1f8ef3a5585ce523dae0c7eec109ab0627dd75aede", size = 94264, upload-time = "2026-04-22T08:06:08.918Z" }, + { url = "https://files.pythonhosted.org/packages/87/78/47dc570e7aee6b0a1ecc2520b30639cc2b06003154c9ab0672d86bf720d5/tree_sitter_c-0.24.2-cp310-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c098bedcd5ac86ff93fa734d51d1dd86aed40fd5ed7d634c7af11380a0469969", size = 94560, upload-time = "2026-04-22T08:06:09.852Z" }, + { url = "https://files.pythonhosted.org/packages/29/37/75d59d3f74f4cfc00f04472917e933d8a9c9fdc6eff980ef9552e010e6aa/tree_sitter_c-0.24.2-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:82842c5a5f2acd93f4de10038c33ac179c8979defc39376f990348d6289e933b", size = 94023, upload-time = "2026-04-22T08:06:10.682Z" }, + { url = "https://files.pythonhosted.org/packages/64/57/8fc655d5a446a70a637e92b98bd2fdaab88bf5bb5b36076ac4add544808d/tree_sitter_c-0.24.2-cp310-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:e2b42e8e22202c251f8629306f9321233542e07a6e01611b5fe83489272143eb", size = 94160, upload-time = "2026-04-22T08:06:11.497Z" }, + { url = "https://files.pythonhosted.org/packages/c1/f7/72a1d6b42dd31fd37e03ff67e7dc5ee572301499e6b216002b8dd42a1714/tree_sitter_c-0.24.2-cp310-abi3-win_amd64.whl", hash = "sha256:abb549225091f7b25df2dd3a0143ece6e208f7055d8bcb4700b41ee79b9ef1e1", size = 84669, upload-time = "2026-04-22T08:06:12.347Z" }, + { url = "https://files.pythonhosted.org/packages/e2/9d/7475d9ae8ef679aa36c7dfe6c903ab78e573651c68b6ef9862d6a3f994db/tree_sitter_c-0.24.2-cp310-abi3-win_arm64.whl", hash = "sha256:4a2f4371cd816cc3153458f69062135ebb2ea5f275ddd90494e5c823d778204a", size = 82956, upload-time = "2026-04-22T08:06:13.364Z" }, ] [[package]] @@ -9121,7 +9082,7 @@ all-docs = [ { name = "python-docx" }, { name = "python-pptx" }, { name = "unstructured-inference", version = "1.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, - { name = "unstructured-inference", version = "1.6.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, + { name = "unstructured-inference", version = "1.6.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, { name = "unstructured-pytesseract" }, { name = "xlrd" }, ] @@ -9145,7 +9106,7 @@ local-inference = [ { name = "python-docx" }, { name = "python-pptx" }, { name = "unstructured-inference", version = "1.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, - { name = "unstructured-inference", version = "1.6.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, + { name = "unstructured-inference", version = "1.6.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, { name = "unstructured-pytesseract" }, { name = "xlrd" }, ] @@ -9206,7 +9167,7 @@ wheels = [ 
[[package]] name = "unstructured-inference" -version = "1.6.6" +version = "1.6.7" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_machine != 's390x'", @@ -9229,9 +9190,9 @@ dependencies = [ { name = "torch", marker = "python_full_version >= '3.12'" }, { name = "transformers", marker = "python_full_version >= '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d3/e3/6c98caf4965e07eb0153dc2b4457ec6fb1cfef336411add4acd3b28c697c/unstructured_inference-1.6.6.tar.gz", hash = "sha256:f14745daef4c37f785d4edb6c3d3834c7414d9d5abd47ca0e377ca60c624d225", size = 47024, upload-time = "2026-04-09T19:58:52.292Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/ea/7a8eb89e518a176057a161a739c75a5daa29039497ff4c7885c6b89daab0/unstructured_inference-1.6.7.tar.gz", hash = "sha256:ae2347b2c1593da8ba7521e0a3fc0d347a37b11f4ee603f852d3f829fb06313a", size = 47217, upload-time = "2026-04-20T21:46:24.059Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/5b/bd4aa4d16446fbc79bea07b22c19c8f8b578c8f1dd73745d152511c17a5a/unstructured_inference-1.6.6-py3-none-any.whl", hash = "sha256:ac472f341407b2ea14d1b63074080af840b9badeefdcd90ea38feb22b4928e5a", size = 54286, upload-time = "2026-04-09T19:58:50.858Z" }, + { url = "https://files.pythonhosted.org/packages/db/4f/064be99f9ba168b07524f18083b62848f4d3a18b011cd1ef3a41dbcbdd1d/unstructured_inference-1.6.7-py3-none-any.whl", hash = "sha256:4fa167ad9493f9a1a4484a5360c396cb12b9d2bff35009b7f710885f60fb3c84", size = 54496, upload-time = "2026-04-20T21:46:22.721Z" }, ] [[package]] @@ -9313,16 +9274,16 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.44.0" +version = "0.45.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/5e/da/6eee1ff8b6cbeed47eeb5229749168e81eb4b7b999a1a15a7176e51410c9/uvicorn-0.44.0.tar.gz", hash = "sha256:6c942071b68f07e178264b9152f1f16dfac5da85880c4ce06366a96d70d4f31e", size = 86947, upload-time = "2026-04-06T09:23:22.826Z" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/2e/62b0d9a2cfc8b4de6771322dae30f2db76c66dae9ec32e94e176a44ad563/uvicorn-0.45.0.tar.gz", hash = "sha256:3fe650df136c5bd2b9b06efc5980636344a2fbb840e9ddd86437d53144fa335d", size = 87818, upload-time = "2026-04-21T10:43:46.815Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/23/a5bbd9600dd607411fa644c06ff4951bec3a4d82c4b852374024359c19c0/uvicorn-0.44.0-py3-none-any.whl", hash = "sha256:ce937c99a2cc70279556967274414c087888e8cec9f9c94644dfca11bd3ced89", size = 69425, upload-time = "2026-04-06T09:23:21.524Z" }, + { url = "https://files.pythonhosted.org/packages/c1/88/d0f7512465b166a4e931ccf7e77792be60fb88466a43964c7566cbaff752/uvicorn-0.45.0-py3-none-any.whl", hash = "sha256:2db26f588131aeec7439de00f2dd52d5f210710c1f01e407a52c90b880d1fd4f", size = 69838, upload-time = "2026-04-21T10:43:45.029Z" }, ] [package.optional-dependencies] @@ -9377,6 +9338,43 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fa/6e/3e955517e22cbdd565f2f8b2e73d52528b14b8bcfdb04f62466b071de847/validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd", size = 44712, upload-time = "2025-05-01T05:42:04.203Z" }, ] +[[package]] +name = "valkey-glide" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "protobuf" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/32/35/fb0401c4bc7be748d937e95213786d21d9e56767b3ad816db5bad6f92c01/valkey_glide-2.0.1.tar.gz", hash = 
"sha256:4f9c62a88aedffd725cced7d28a9488b27e3f675d1a5294b4962624e97d346c4", size = 1026255, upload-time = "2025-06-20T01:08:15.861Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/a3/bf5ff3841538d0bb337371e073dc2c0e93f748f7f8b10a44806f36ab5fa1/valkey_glide-2.0.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:b3307934b76557b18ac559f327592cc09fc895fc653ba46010dd6d70fb6239dc", size = 5074638, upload-time = "2025-06-20T01:07:30.16Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c4/20b66dced96bdca81aa294b39bc03018ed22628c52076752e8d1d3540a7d/valkey_glide-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b83d34e2e723e97c41682479b0dce5882069066e808316292b363855992b449", size = 4750261, upload-time = "2025-06-20T01:07:32.452Z" }, + { url = "https://files.pythonhosted.org/packages/53/58/6440e66bde8963d86bc3c44d88f993059f2a9d7ebdb3256a695d035cff50/valkey_glide-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1baaf14d09d464ae645be5bdb5dc6b8a38b7eacf22f9dcb2907200c74fbdcdd3", size = 4767755, upload-time = "2025-06-20T01:07:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/3b/69/dd5c350ce4d2cadde0d83beb601f05e1e62622895f268135e252e8bfc307/valkey_glide-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4427e7b4d54c9de289a35032c19d5956f94376f5d4335206c5ac4524cbd1c64a", size = 5094507, upload-time = "2025-06-20T01:07:35.349Z" }, + { url = "https://files.pythonhosted.org/packages/b5/dd/0dd6614e09123a5bd7273bf1159c958d1ea65e7decc2190b225d212e0cb9/valkey_glide-2.0.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6379582d6fbd817697fb119274e37d397db450103cd15d4bd71e555e6d88fb6b", size = 5072939, upload-time = "2025-06-20T01:07:36.948Z" }, + { url = "https://files.pythonhosted.org/packages/c6/04/986188e407231a5f0bfaf31f31b68e3605ab66f4f4c656adfbb0345669d9/valkey_glide-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:0f1c0fe003026d8ae172369e0eb2337cbff16f41d4c085332487d6ca2e5282e6", size = 4750491, upload-time = "2025-06-20T01:07:38.659Z" }, + { url = "https://files.pythonhosted.org/packages/ac/fb/2f5cec71ae51c464502a892b6825426cd74a2c325827981726e557926c94/valkey_glide-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82c5f33598e50bcfec6fc924864931f3c6e30cd327a9c9562e1c7ac4e17e79fd", size = 4767597, upload-time = "2025-06-20T01:07:40.091Z" }, + { url = "https://files.pythonhosted.org/packages/3a/31/851a1a734fe5da5d520106fcfd824e4da09c3be8a0a2123bb4b1980db1ea/valkey_glide-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79039a9dc23bb074680f171c12b36b3322357a0af85125534993e81a619dce21", size = 5094383, upload-time = "2025-06-20T01:07:41.329Z" }, + { url = "https://files.pythonhosted.org/packages/fc/6d/1e7b432cbc02fe63e7496b984b7fc830fb7de388c877b237e0579a6300fc/valkey_glide-2.0.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:f55ec8968b0fde364a5b3399be34b89dcb9068994b5cd384e20db0773ad12723", size = 5075024, upload-time = "2025-06-20T01:07:42.917Z" }, + { url = "https://files.pythonhosted.org/packages/ca/39/6e9f83970590d17d19f596e1b3a366d39077624888e3dd709309efc67690/valkey_glide-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:21598f49313912ad27dc700d7b13a3b4bfed7ed9dffad207235cac7d218f4966", size = 4748418, upload-time = "2025-06-20T01:07:44.64Z" }, + { url = "https://files.pythonhosted.org/packages/98/0e/91335c13dc8e7ceb95063234c16010b46e2dd874a2edef62dea155081647/valkey_glide-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f662285146529328e2b5a0a7047f699339b4e0d250eb1f252b15c9befa0dea05", size = 4767264, upload-time = "2025-06-20T01:07:46.185Z" }, + { url = "https://files.pythonhosted.org/packages/5f/94/ee4d9d441f83fec1464d9f4e52f7940bdd2aeb917589e6abd57498880876/valkey_glide-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3939aaa8411fcbba00cb1ff7c7ba73f388bb1deca919972f65cba7eda1d5fa95", size = 5093543, upload-time = "2025-06-20T01:07:47.345Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7e/257a2e4b61ac29d5923f89bad5fe62be7b4a19e7bec78d191af3ce77aa39/valkey_glide-2.0.1-cp313-cp313-macosx_10_7_x86_64.whl", hash = "sha256:c49b53011a05b5820d0c660ee5c76574183b413a54faa33cf5c01ce77164d9c8", size = 5073114, upload-time = "2025-06-20T01:07:48.885Z" }, + { url = "https://files.pythonhosted.org/packages/20/14/a8a470679953980af7eac3ccb09638f2a76d4547116d48cbc69ae6f25080/valkey_glide-2.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3a23572b83877537916ba36ad0a6b2fd96581534f0bc67ef8f8498bf4dbb2b40", size = 4747717, upload-time = "2025-06-20T01:07:50.092Z" }, + { url = "https://files.pythonhosted.org/packages/9f/49/f168dd0c778d9f6ff1be70d5d3bad7a86928fee563de7de5f4f575eddfd8/valkey_glide-2.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:943a2c4a5c38b8a6b53281201d5a4997ec454a6fdda72d27050eeb6aaef12afb", size = 4767128, upload-time = "2025-06-20T01:07:51.306Z" }, + { url = "https://files.pythonhosted.org/packages/43/be/68961b14ea133d1792ce50f6df1753848b5377c3e06a8dbe4e39188a549a/valkey_glide-2.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d770ec581acc59d5597e7ccaac37aee7e3b5e716a77a7fa44e2967db3a715f53", size = 5093522, upload-time = "2025-06-20T01:07:52.546Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/ad8595ffe84317385d52ceab8de1e9ef06a4da6b81ca8cd61b7961923de4/valkey_glide-2.0.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d4a9ccfe2b190c90622849dab62f9468acf76a282719a1245d272b649e7c12d1", size = 5074539, upload-time = "2025-06-20T01:07:59.87Z" }, + { url = "https://files.pythonhosted.org/packages/db/e5/2122541c7a64706f3631655209bb0b13723fb99db3c190d9a792b4e7d494/valkey_glide-2.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:9aa004077b82f64b23ea0d38d948b5116c23f7228dae3a5b4fcfa1799f8ff7de", size = 4753222, upload-time = "2025-06-20T01:08:01.376Z" }, + { url = "https://files.pythonhosted.org/packages/6c/13/cd9a20988a820ff61b127d3f850887b28bb734daf2c26d512d8e4c2e8e9e/valkey_glide-2.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:631a7a0e2045f7e5e3706e1903beeddf381a6529e318c27230798f4382579e4f", size = 4771530, upload-time = "2025-06-20T01:08:02.6Z" }, + { url = "https://files.pythonhosted.org/packages/c7/fc/047e89cc01b4cc71db1b6b8160d3b5d050097b408028022c002351238641/valkey_glide-2.0.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ed905fb62368c9bc6aef9df8d66269ef51f968dc527da4d7c956927382c1d", size = 5091242, upload-time = "2025-06-20T01:08:04.111Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9e/68790c1a263f3a0094d67d0109be34631f6f79c2fbce5ced7e33a65ad363/valkey_glide-2.0.1-pp311-pypy311_pp73-macosx_10_7_x86_64.whl", hash = "sha256:53da3cc47c8d946ac76ecc4b468a469d3486778833a59162ea69aa7ce70cbb27", size = 5072793, upload-time = "2025-06-20T01:08:05.562Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ae/a935af65ae4069d76c69f28f6bfb4533da8b89f7fc418beb7a1482cdd9ee/valkey_glide-2.0.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:e526a7d718cdd299d6b03091c12dcc15cd02ff22fe420f253341a4891c50824d", size = 4753435, upload-time = "2025-06-20T01:08:07.149Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c2/c91d753a89dd87dce2fc8932cfbe174c7a1226c657b3cd64c063f21d4fe6/valkey_glide-2.0.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d3345ea2adf6f745733fa5157d8709bcf5ffbb2674391aeebd8f166a37cbc96", size = 4771401, upload-time = "2025-06-20T01:08:08.359Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/fe/ad83cfc2ac87bf6bad2b75fa64fca5a6dd54568c1de551d36d369e07f948/valkey_glide-2.0.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1c5fff0f12d2aa4277ddc335035b2c8e12bb11243c1a0f3c35071f4a8b11064", size = 5091360, upload-time = "2025-06-20T01:08:09.622Z" }, +] + [[package]] name = "vcrpy" version = "7.0.0" @@ -9531,48 +9529,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = "2026-02-06T19:19:39.646Z" }, ] -[[package]] -name = "weaviate-client" -version = "4.16.2" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13' and platform_machine != 's390x'", - "python_full_version < '3.11' and platform_machine != 's390x'", - "python_full_version < '3.11' and platform_machine == 's390x'", -] -dependencies = [ - { name = "authlib", marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, - { name = "deprecation", marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, - { name = "grpcio", version = "1.80.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, - { name = "grpcio-health-checking", marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, - { name = "httpx", marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, - { name = "pydantic", marker = "python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, - { name = "validators", marker = 
"python_full_version < '3.11' or (python_full_version >= '3.13' and platform_machine != 's390x')" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a7/b9/7b9e05cf923743aa1479afcd85c48ebca82d031c3c3a5d02b1b3fcb52eb9/weaviate_client-4.16.2.tar.gz", hash = "sha256:eb7107a3221a5ad68d604cafc65195bd925a9709512ea0b6fe0dd212b0678fab", size = 681321, upload-time = "2025-07-22T09:10:48.79Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/c8/8a8c7ddbdd2c7fc73782056310666736a36a7d860f9935ce1d21f5f6c02e/weaviate_client-4.16.2-py3-none-any.whl", hash = "sha256:c236adca30d18667943544ad89fcd9157947af95dfc6de4a8ecf9e7619f1c979", size = 451475, upload-time = "2025-07-22T09:10:46.941Z" }, -] - [[package]] name = "weaviate-client" version = "4.18.3" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13' and platform_machine == 's390x'", - "python_full_version == '3.12.*' and platform_machine != 's390x'", - "python_full_version == '3.12.*' and platform_machine == 's390x'", - "python_full_version == '3.11.*' and platform_machine != 's390x'", - "python_full_version == '3.11.*' and platform_machine == 's390x'", -] dependencies = [ - { name = "authlib", marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "deprecation", marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "grpcio", version = "1.78.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "httpx", marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "protobuf", marker = 
"(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "pydantic", marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, - { name = "validators", marker = "(python_full_version >= '3.11' and python_full_version < '3.13') or (python_full_version >= '3.11' and platform_machine == 's390x')" }, + { name = "authlib" }, + { name = "deprecation" }, + { name = "grpcio" }, + { name = "httpx" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "validators" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a8/76/14e07761c5fb7e8573e3cff562e2d9073c65f266db0e67511403d10435b1/weaviate_client-4.18.3.tar.gz", hash = "sha256:9d889246d62be36641a7f2b8cedf5fb665b804d46f7a53ae37e02d297a11f119", size = 783634, upload-time = "2025-12-03T09:38:28.261Z" } wheels = [