diff --git a/docs/ar/changelog.mdx b/docs/ar/changelog.mdx
index 09a397719c..c99dba7d74 100644
--- a/docs/ar/changelog.mdx
+++ b/docs/ar/changelog.mdx
@@ -4,6 +4,31 @@ description: "تحديثات المنتج والتحسينات وإصلاحات
icon: "clock"
mode: "wide"
---
+
+ ## v1.14.3a1
+
+ [عرض الإصدار على GitHub](https://github.com/crewAIInc/crewAI/releases/tag/1.14.3a1)
+
+ ## ما الذي تغير
+
+ ### الميزات
+ - إضافة دعم نقاط التحقق والفروع لوكلاء مستقلين
+
+ ### إصلاحات الأخطاء
+ - الحفاظ على thought_signature في استدعاءات أداة البث Gemini
+ - إصدار task_started عند استئناف الفرع وإعادة تصميم واجهة المستخدم النصية لنقاط التحقق
+ - تصحيح ترتيب التشغيل الجاف ومعالجة الفرع القديم الذي تم التحقق منه في إصدار أدوات التطوير
+ - استخدام تواريخ مستقبلية في اختبارات تقليم نقاط التحقق لمنع الفشل المعتمد على الوقت (#5543)
+
+ ### الوثائق
+ - تحديث سجل التغييرات والإصدار لـ v1.14.2
+
+ ## المساهمون
+
+ @alex-clawd, @greysonlalonde
+
+
+
## v1.14.2
diff --git a/docs/en/changelog.mdx b/docs/en/changelog.mdx
index aabff6e246..26ac50285e 100644
--- a/docs/en/changelog.mdx
+++ b/docs/en/changelog.mdx
@@ -4,6 +4,31 @@ description: "Product updates, improvements, and bug fixes for CrewAI"
icon: "clock"
mode: "wide"
---
+
+ ## v1.14.3a1
+
+ [View release on GitHub](https://github.com/crewAIInc/crewAI/releases/tag/1.14.3a1)
+
+ ## What's Changed
+
+ ### Features
+ - Add checkpoint and fork support to standalone agents
+
+ ### Bug Fixes
+ - Preserve thought_signature in Gemini streaming tool calls
+ - Emit task_started on fork resume and redesign checkpoint TUI
+ - Correct dry-run order and handle checked-out stale branch in devtools release
+ - Use future dates in checkpoint prune tests to prevent time-dependent failures (#5543)
+
+ ### Documentation
+ - Update changelog and version for v1.14.2
+
+ ## Contributors
+
+ @alex-clawd, @greysonlalonde
+
+
+
## v1.14.2
diff --git a/docs/ko/changelog.mdx b/docs/ko/changelog.mdx
index 0901afab68..ab97157144 100644
--- a/docs/ko/changelog.mdx
+++ b/docs/ko/changelog.mdx
@@ -4,6 +4,31 @@ description: "CrewAI의 제품 업데이트, 개선 사항 및 버그 수정"
icon: "clock"
mode: "wide"
---
+
+ ## v1.14.3a1
+
+ [GitHub 릴리스 보기](https://github.com/crewAIInc/crewAI/releases/tag/1.14.3a1)
+
+ ## 변경 사항
+
+ ### 기능
+ - 독립형 에이전트에 체크포인트 및 포크 지원 추가
+
+ ### 버그 수정
+ - Gemini 스트리밍 도구 호출에서 thought_signature 보존
+ - 포크 재개 시 task_started 방출 및 체크포인트 TUI 재설계
+ - dry-run 순서 수정 및 devtools 릴리스에서 체크아웃된 오래된 브랜치 처리
+ - 체크포인트 가지치기 테스트에서 미래 날짜 사용하여 시간 의존성 실패 방지 (#5543)
+
+ ### 문서
+ - v1.14.2에 대한 변경 로그 및 버전 업데이트
+
+ ## 기여자
+
+ @alex-clawd, @greysonlalonde
+
+
+
## v1.14.2
diff --git a/docs/pt-BR/changelog.mdx b/docs/pt-BR/changelog.mdx
index cf1eef1a9b..a32cc6ff54 100644
--- a/docs/pt-BR/changelog.mdx
+++ b/docs/pt-BR/changelog.mdx
@@ -4,6 +4,31 @@ description: "Atualizações de produto, melhorias e correções do CrewAI"
icon: "clock"
mode: "wide"
---
+
+ ## v1.14.3a1
+
+ [Ver release no GitHub](https://github.com/crewAIInc/crewAI/releases/tag/1.14.3a1)
+
+ ## O que Mudou
+
+ ### Funcionalidades
+ - Adicionar suporte a checkpoint e fork para agentes autônomos
+
+ ### Correções de Bugs
+ - Preservar thought_signature nas chamadas da ferramenta de streaming Gemini
+ - Emitir task_started na retomada do fork e redesenhar a TUI de checkpoint
+ - Corrigir a ordem do dry-run e lidar com branch desatualizada em release do devtools
+ - Usar datas futuras nos testes de poda de checkpoint para evitar falhas dependentes do tempo (#5543)
+
+ ### Documentação
+ - Atualizar changelog e versão para v1.14.2
+
+ ## Contribuidores
+
+ @alex-clawd, @greysonlalonde
+
+
+
## v1.14.2
diff --git a/lib/crewai-files/src/crewai_files/__init__.py b/lib/crewai-files/src/crewai_files/__init__.py
index 1b954d8865..9fba24b558 100644
--- a/lib/crewai-files/src/crewai_files/__init__.py
+++ b/lib/crewai-files/src/crewai_files/__init__.py
@@ -152,4 +152,4 @@
"wrap_file_source",
]
-__version__ = "1.14.2"
+__version__ = "1.14.3a1"
diff --git a/lib/crewai-tools/pyproject.toml b/lib/crewai-tools/pyproject.toml
index c75e406dd2..473f2e4114 100644
--- a/lib/crewai-tools/pyproject.toml
+++ b/lib/crewai-tools/pyproject.toml
@@ -10,7 +10,7 @@ requires-python = ">=3.10, <3.14"
dependencies = [
"pytube~=15.0.0",
"requests>=2.33.0,<3",
- "crewai==1.14.2",
+ "crewai==1.14.3a1",
"tiktoken~=0.8.0",
"beautifulsoup4~=4.13.4",
"python-docx~=1.2.0",
@@ -139,6 +139,9 @@ contextual = [
"contextual-client>=0.1.0",
"nest-asyncio>=1.6.0",
]
+daytona = [
+ "daytona~=0.140.0",
+]
[tool.uv]
diff --git a/lib/crewai-tools/src/crewai_tools/__init__.py b/lib/crewai-tools/src/crewai_tools/__init__.py
index 56b30bf9b3..87e53b7567 100644
--- a/lib/crewai-tools/src/crewai_tools/__init__.py
+++ b/lib/crewai-tools/src/crewai_tools/__init__.py
@@ -59,6 +59,11 @@
from crewai_tools.tools.databricks_query_tool.databricks_query_tool import (
DatabricksQueryTool,
)
+from crewai_tools.tools.daytona_sandbox_tool import (
+ DaytonaExecTool,
+ DaytonaFileTool,
+ DaytonaPythonTool,
+)
from crewai_tools.tools.directory_read_tool.directory_read_tool import (
DirectoryReadTool,
)
@@ -232,6 +237,9 @@
"DOCXSearchTool",
"DallETool",
"DatabricksQueryTool",
+ "DaytonaExecTool",
+ "DaytonaFileTool",
+ "DaytonaPythonTool",
"DirectoryReadTool",
"DirectorySearchTool",
"EXASearchTool",
@@ -305,4 +313,4 @@
"ZapierActionTools",
]
-__version__ = "1.14.2"
+__version__ = "1.14.3a1"
diff --git a/lib/crewai-tools/src/crewai_tools/tools/__init__.py b/lib/crewai-tools/src/crewai_tools/tools/__init__.py
index d3c1da6645..40fdb74eb6 100644
--- a/lib/crewai-tools/src/crewai_tools/tools/__init__.py
+++ b/lib/crewai-tools/src/crewai_tools/tools/__init__.py
@@ -48,6 +48,11 @@
from crewai_tools.tools.databricks_query_tool.databricks_query_tool import (
DatabricksQueryTool,
)
+from crewai_tools.tools.daytona_sandbox_tool import (
+ DaytonaExecTool,
+ DaytonaFileTool,
+ DaytonaPythonTool,
+)
from crewai_tools.tools.directory_read_tool.directory_read_tool import (
DirectoryReadTool,
)
@@ -217,6 +222,9 @@
"DOCXSearchTool",
"DallETool",
"DatabricksQueryTool",
+ "DaytonaExecTool",
+ "DaytonaFileTool",
+ "DaytonaPythonTool",
"DirectoryReadTool",
"DirectorySearchTool",
"EXASearchTool",
diff --git a/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/README.md b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/README.md
new file mode 100644
index 0000000000..a2365049e0
--- /dev/null
+++ b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/README.md
@@ -0,0 +1,107 @@
+# Daytona Sandbox Tools
+
+Run shell commands, execute Python, and manage files inside a [Daytona](https://www.daytona.io/) sandbox. Daytona provides isolated, ephemeral compute environments suitable for agent-driven code execution.
+
+Three tools are provided so you can pick what the agent actually needs:
+
+- **`DaytonaExecTool`** — run a shell command (`sandbox.process.exec`).
+- **`DaytonaPythonTool`** — run a Python script (`sandbox.process.code_run`).
+- **`DaytonaFileTool`** — read / write / list / delete files (`sandbox.fs.*`).
+
+## Installation
+
+```shell
+uv add "crewai-tools[daytona]"
+# or
+pip install "crewai-tools[daytona]"
+```
+
+Set the API key:
+
+```shell
+export DAYTONA_API_KEY="..."
+```
+
+`DAYTONA_API_URL` and `DAYTONA_TARGET` are also respected if set.
+
+## Sandbox lifecycle
+
+All three tools share the same lifecycle controls from `DaytonaBaseTool`:
+
+| Mode | When the sandbox is created | When it is deleted |
+| --- | --- | --- |
+| **Ephemeral** (default, `persistent=False`) | On every `_run` call | At the end of that same call |
+| **Persistent** (`persistent=True`) | Lazily on first use | At process exit (via `atexit`), or manually via `tool.close()` |
+| **Attach** (`sandbox_id="…"`) | Never — the tool attaches to an existing sandbox | Never — the tool will not delete a sandbox it did not create |
+
+Ephemeral mode is the safe default: nothing leaks if the agent forgets to clean up. Use persistent mode when you want filesystem state or installed packages to carry across steps — this is typical when pairing `DaytonaFileTool` with `DaytonaExecTool`.
+
+## Examples
+
+### One-shot Python execution (ephemeral)
+
+```python
+from crewai_tools import DaytonaPythonTool
+
+tool = DaytonaPythonTool()
+result = tool.run(code="print(sum(range(10)))")
+```
+
+### Multi-step shell session (persistent)
+
+```python
+from crewai_tools import DaytonaExecTool, DaytonaFileTool
+
+exec_tool = DaytonaExecTool(persistent=True)
+file_tool = DaytonaFileTool(persistent=True)
+
+# Agent writes a script, then runs it — note these tools do NOT share a
+# sandbox: each keeps its own persistent sandbox. If you need the *same*
+# sandbox across two tools, create one tool, grab the sandbox id via
+# `tool._persistent_sandbox.id`, and pass it to the other via `sandbox_id=...`.
+```
+
+### Attach to an existing sandbox
+
+```python
+from crewai_tools import DaytonaExecTool
+
+tool = DaytonaExecTool(sandbox_id="my-long-lived-sandbox")
+```
+
+### Custom create params
+
+Pass Daytona's `CreateSandboxFromSnapshotParams` kwargs via `create_params`:
+
+```python
+tool = DaytonaExecTool(
+ persistent=True,
+ create_params={
+ "language": "python",
+ "env_vars": {"MY_FLAG": "1"},
+ "labels": {"owner": "crewai-agent"},
+ },
+)
+```
+
+## Tool arguments
+
+### `DaytonaExecTool`
+- `command: str` — shell command to run.
+- `cwd: str | None` — working directory.
+- `env: dict[str, str] | None` — extra env vars for this command.
+- `timeout: int | None` — seconds.
+
+### `DaytonaPythonTool`
+- `code: str` — Python source to execute.
+- `argv: list[str] | None` — argv forwarded via `CodeRunParams`.
+- `env: dict[str, str] | None` — env vars forwarded via `CodeRunParams`.
+- `timeout: int | None` — seconds.
+
+### `DaytonaFileTool`
+- `action: "read" | "write" | "list" | "delete" | "mkdir" | "info"`
+- `path: str` — absolute path inside the sandbox.
+- `content: str | None` — required for `write`.
+- `binary: bool` — if `True`, `content` is base64 on write / returned as base64 on read.
+- `recursive: bool` — for `delete`, removes directories recursively.
+- `mode: str` — for `mkdir`, octal permission string (default `"0755"`).
diff --git a/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/__init__.py b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/__init__.py
new file mode 100644
index 0000000000..e04396bfb0
--- /dev/null
+++ b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/__init__.py
@@ -0,0 +1,13 @@
+from crewai_tools.tools.daytona_sandbox_tool.daytona_base_tool import DaytonaBaseTool
+from crewai_tools.tools.daytona_sandbox_tool.daytona_exec_tool import DaytonaExecTool
+from crewai_tools.tools.daytona_sandbox_tool.daytona_file_tool import DaytonaFileTool
+from crewai_tools.tools.daytona_sandbox_tool.daytona_python_tool import (
+ DaytonaPythonTool,
+)
+
+__all__ = [
+ "DaytonaBaseTool",
+ "DaytonaExecTool",
+ "DaytonaFileTool",
+ "DaytonaPythonTool",
+]
diff --git a/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_base_tool.py b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_base_tool.py
new file mode 100644
index 0000000000..b601e4309d
--- /dev/null
+++ b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_base_tool.py
@@ -0,0 +1,198 @@
+from __future__ import annotations
+
+import atexit
+import logging
+import os
+import threading
+from typing import Any, ClassVar
+
+from crewai.tools import BaseTool, EnvVar
+from pydantic import ConfigDict, Field, PrivateAttr
+
+
+logger = logging.getLogger(__name__)
+
+
+class DaytonaBaseTool(BaseTool):
+ """Shared base for tools that act on a Daytona sandbox.
+
+ Lifecycle modes:
+ - persistent=False (default): create a fresh sandbox per `_run` call and
+ delete it when the call returns. Safer and stateless — nothing leaks if
+ the agent forgets cleanup.
+ - persistent=True: lazily create a single sandbox on first use, cache it
+ on the instance, and register an atexit hook to delete it at process
+ exit. Cheaper across many calls and lets files/state carry over.
+ - sandbox_id=: attach to a sandbox the caller already owns.
+ Never deleted by the tool.
+ """
+
+ model_config = ConfigDict(arbitrary_types_allowed=True)
+
+ package_dependencies: list[str] = Field(default_factory=lambda: ["daytona"])
+
+ api_key: str | None = Field(
+ default_factory=lambda: os.getenv("DAYTONA_API_KEY"),
+ description="Daytona API key. Falls back to DAYTONA_API_KEY env var.",
+ json_schema_extra={"required": False},
+ )
+ api_url: str | None = Field(
+ default_factory=lambda: os.getenv("DAYTONA_API_URL"),
+ description="Daytona API URL override. Falls back to DAYTONA_API_URL env var.",
+ json_schema_extra={"required": False},
+ )
+ target: str | None = Field(
+ default_factory=lambda: os.getenv("DAYTONA_TARGET"),
+ description="Daytona target region. Falls back to DAYTONA_TARGET env var.",
+ json_schema_extra={"required": False},
+ )
+
+ persistent: bool = Field(
+ default=False,
+ description=(
+ "If True, reuse one sandbox across all calls to this tool instance "
+ "and delete it at process exit. Default False creates and deletes a "
+ "fresh sandbox per call."
+ ),
+ )
+ sandbox_id: str | None = Field(
+ default=None,
+ description=(
+ "Attach to an existing sandbox by id or name instead of creating a "
+ "new one. The tool will never delete a sandbox it did not create."
+ ),
+ )
+ create_params: dict[str, Any] | None = Field(
+ default=None,
+ description=(
+ "Optional kwargs forwarded to CreateSandboxFromSnapshotParams when "
+ "creating a sandbox (e.g. language, snapshot, env_vars, labels)."
+ ),
+ )
+ sandbox_timeout: float = Field(
+ default=60.0,
+ description="Timeout in seconds for sandbox create/delete operations.",
+ )
+
+ env_vars: list[EnvVar] = Field(
+ default_factory=lambda: [
+ EnvVar(
+ name="DAYTONA_API_KEY",
+ description="API key for Daytona sandbox service",
+ required=False,
+ ),
+ EnvVar(
+ name="DAYTONA_API_URL",
+ description="Daytona API base URL (optional)",
+ required=False,
+ ),
+ EnvVar(
+ name="DAYTONA_TARGET",
+ description="Daytona target region (optional)",
+ required=False,
+ ),
+ ]
+ )
+
+ _client: Any | None = PrivateAttr(default=None)
+ _persistent_sandbox: Any | None = PrivateAttr(default=None)
+ _lock: threading.Lock = PrivateAttr(default_factory=threading.Lock)
+ _cleanup_registered: bool = PrivateAttr(default=False)
+
+ _sdk_cache: ClassVar[dict[str, Any]] = {}
+
+ @classmethod
+ def _import_sdk(cls) -> dict[str, Any]:
+ if cls._sdk_cache:
+ return cls._sdk_cache
+ try:
+ from daytona import (
+ CreateSandboxFromSnapshotParams,
+ Daytona,
+ DaytonaConfig,
+ )
+ except ImportError as exc:
+ raise ImportError(
+ "The 'daytona' package is required for Daytona sandbox tools. "
+ "Install it with: uv add daytona (or) pip install daytona"
+ ) from exc
+ cls._sdk_cache = {
+ "Daytona": Daytona,
+ "DaytonaConfig": DaytonaConfig,
+ "CreateSandboxFromSnapshotParams": CreateSandboxFromSnapshotParams,
+ }
+ return cls._sdk_cache
+
+ def _get_client(self) -> Any:
+ if self._client is not None:
+ return self._client
+ sdk = self._import_sdk()
+ config_kwargs: dict[str, Any] = {}
+ if self.api_key:
+ config_kwargs["api_key"] = self.api_key
+ if self.api_url:
+ config_kwargs["api_url"] = self.api_url
+ if self.target:
+ config_kwargs["target"] = self.target
+ config = sdk["DaytonaConfig"](**config_kwargs) if config_kwargs else None
+ self._client = sdk["Daytona"](config) if config else sdk["Daytona"]()
+ return self._client
+
+ def _build_create_params(self) -> Any | None:
+ if not self.create_params:
+ return None
+ sdk = self._import_sdk()
+ return sdk["CreateSandboxFromSnapshotParams"](**self.create_params)
+
+ def _acquire_sandbox(self) -> tuple[Any, bool]:
+ """Return (sandbox, should_delete_after_use)."""
+ client = self._get_client()
+
+ if self.sandbox_id:
+ return client.get(self.sandbox_id), False
+
+ if self.persistent:
+ with self._lock:
+ if self._persistent_sandbox is None:
+ self._persistent_sandbox = client.create(
+ self._build_create_params(),
+ timeout=self.sandbox_timeout,
+ )
+ if not self._cleanup_registered:
+ atexit.register(self.close)
+ self._cleanup_registered = True
+ return self._persistent_sandbox, False
+
+ sandbox = client.create(
+ self._build_create_params(),
+ timeout=self.sandbox_timeout,
+ )
+ return sandbox, True
+
+ def _release_sandbox(self, sandbox: Any, should_delete: bool) -> None:
+ if not should_delete:
+ return
+ try:
+ sandbox.delete(timeout=self.sandbox_timeout)
+ except Exception:
+ logger.debug(
+ "Best-effort sandbox cleanup failed after ephemeral use; "
+ "the sandbox may need manual deletion.",
+ exc_info=True,
+ )
+
+ def close(self) -> None:
+ """Delete the cached persistent sandbox if one exists."""
+ with self._lock:
+ sandbox = self._persistent_sandbox
+ self._persistent_sandbox = None
+ if sandbox is None:
+ return
+ try:
+ sandbox.delete(timeout=self.sandbox_timeout)
+ except Exception:
+ logger.debug(
+ "Best-effort persistent sandbox cleanup failed at close(); "
+ "the sandbox may need manual deletion.",
+ exc_info=True,
+ )
diff --git a/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_exec_tool.py b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_exec_tool.py
new file mode 100644
index 0000000000..cffcab2205
--- /dev/null
+++ b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_exec_tool.py
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+from builtins import type as type_
+from typing import Any
+
+from pydantic import BaseModel, Field
+
+from crewai_tools.tools.daytona_sandbox_tool.daytona_base_tool import DaytonaBaseTool
+
+
+class DaytonaExecToolSchema(BaseModel):
+ command: str = Field(..., description="Shell command to execute in the sandbox.")
+ cwd: str | None = Field(
+ default=None,
+ description="Working directory to run the command in. Defaults to the sandbox work dir.",
+ )
+ env: dict[str, str] | None = Field(
+ default=None,
+ description="Optional environment variables to set for this command.",
+ )
+ timeout: int | None = Field(
+ default=None,
+ description="Maximum seconds to wait for the command to finish.",
+ )
+
+
+class DaytonaExecTool(DaytonaBaseTool):
+ """Run a shell command inside a Daytona sandbox."""
+
+ name: str = "Daytona Sandbox Exec"
+ description: str = (
+ "Execute a shell command inside a Daytona sandbox and return the exit "
+ "code and combined output. Use this to run builds, package installs, "
+ "git operations, or any one-off shell command."
+ )
+ args_schema: type_[BaseModel] = DaytonaExecToolSchema
+
+ def _run(
+ self,
+ command: str,
+ cwd: str | None = None,
+ env: dict[str, str] | None = None,
+ timeout: int | None = None,
+ ) -> Any:
+ sandbox, should_delete = self._acquire_sandbox()
+ try:
+ response = sandbox.process.exec(
+ command,
+ cwd=cwd,
+ env=env,
+ timeout=timeout,
+ )
+ return {
+ "exit_code": getattr(response, "exit_code", None),
+ "result": getattr(response, "result", None),
+ "artifacts": getattr(response, "artifacts", None),
+ }
+ finally:
+ self._release_sandbox(sandbox, should_delete)
diff --git a/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_file_tool.py b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_file_tool.py
new file mode 100644
index 0000000000..e019419b3e
--- /dev/null
+++ b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_file_tool.py
@@ -0,0 +1,205 @@
+from __future__ import annotations
+
+import base64
+from builtins import type as type_
+import logging
+import posixpath
+from typing import Any, Literal
+
+from pydantic import BaseModel, Field, model_validator
+
+from crewai_tools.tools.daytona_sandbox_tool.daytona_base_tool import DaytonaBaseTool
+
+
+logger = logging.getLogger(__name__)
+
+
+FileAction = Literal["read", "write", "append", "list", "delete", "mkdir", "info"]
+
+
+class DaytonaFileToolSchema(BaseModel):
+ action: FileAction = Field(
+ ...,
+ description=(
+ "The filesystem action to perform: 'read' (returns file contents), "
+ "'write' (create or replace a file with content), 'append' (append "
+ "content to an existing file — use this for writing large files in "
+ "chunks to avoid hitting tool-call size limits), 'list' (lists a "
+ "directory), 'delete' (removes a file/dir), 'mkdir' (creates a "
+ "directory), 'info' (returns file metadata)."
+ ),
+ )
+ path: str = Field(..., description="Absolute path inside the sandbox.")
+ content: str | None = Field(
+ default=None,
+ description=(
+ "Content to write or append. If omitted for 'write', an empty file "
+ "is created. For files larger than a few KB, prefer one 'write' "
+ "with empty content followed by multiple 'append' calls of ~4KB "
+ "each to stay within tool-call payload limits."
+ ),
+ )
+ binary: bool = Field(
+ default=False,
+ description=(
+ "For 'write': treat content as base64 and upload raw bytes. "
+ "For 'read': return contents as base64 instead of decoded utf-8."
+ ),
+ )
+ recursive: bool = Field(
+ default=False,
+ description="For action='delete': remove directories recursively.",
+ )
+ mode: str = Field(
+ default="0755",
+ description="For action='mkdir': octal permission string (default 0755).",
+ )
+
+ @model_validator(mode="after")
+ def _validate_action_args(self) -> DaytonaFileToolSchema:
+ if self.action == "append" and self.content is None:
+ raise ValueError(
+ "action='append' requires 'content'. Pass the chunk to append "
+ "in the 'content' field."
+ )
+ return self
+
+
+class DaytonaFileTool(DaytonaBaseTool):
+ """Read, write, and manage files inside a Daytona sandbox.
+
+ Notes:
+ - Most useful with `persistent=True` or an explicit `sandbox_id`. With the
+ default ephemeral mode, files disappear when this tool call finishes.
+ """
+
+ name: str = "Daytona Sandbox Files"
+ description: str = (
+ "Perform filesystem operations inside a Daytona sandbox: read a file, "
+ "write content to a path, append content to an existing file, list a "
+ "directory, delete a path, make a directory, or fetch file metadata. "
+ "For files larger than a few KB, create the file with action='write' "
+ "and empty content, then send the body via multiple 'append' calls of "
+ "~4KB each to stay within tool-call payload limits."
+ )
+ args_schema: type_[BaseModel] = DaytonaFileToolSchema
+
+ def _run(
+ self,
+ action: FileAction,
+ path: str,
+ content: str | None = None,
+ binary: bool = False,
+ recursive: bool = False,
+ mode: str = "0755",
+ ) -> Any:
+ sandbox, should_delete = self._acquire_sandbox()
+ try:
+ if action == "read":
+ return self._read(sandbox, path, binary=binary)
+ if action == "write":
+ return self._write(sandbox, path, content or "", binary=binary)
+ if action == "append":
+ return self._append(sandbox, path, content or "", binary=binary)
+ if action == "list":
+ return self._list(sandbox, path)
+ if action == "delete":
+ sandbox.fs.delete_file(path, recursive=recursive)
+ return {"status": "deleted", "path": path}
+ if action == "mkdir":
+ sandbox.fs.create_folder(path, mode)
+ return {"status": "created", "path": path, "mode": mode}
+ if action == "info":
+ return self._info(sandbox, path)
+ raise ValueError(f"Unknown action: {action}")
+ finally:
+ self._release_sandbox(sandbox, should_delete)
+
+ def _read(self, sandbox: Any, path: str, *, binary: bool) -> dict[str, Any]:
+ data: bytes = sandbox.fs.download_file(path)
+ if binary:
+ return {
+ "path": path,
+ "encoding": "base64",
+ "content": base64.b64encode(data).decode("ascii"),
+ }
+ try:
+ return {"path": path, "encoding": "utf-8", "content": data.decode("utf-8")}
+ except UnicodeDecodeError:
+ return {
+ "path": path,
+ "encoding": "base64",
+ "content": base64.b64encode(data).decode("ascii"),
+ "note": "File was not valid utf-8; returned as base64.",
+ }
+
+ def _write(
+ self, sandbox: Any, path: str, content: str, *, binary: bool
+ ) -> dict[str, Any]:
+ payload = base64.b64decode(content) if binary else content.encode("utf-8")
+ self._ensure_parent_dir(sandbox, path)
+ sandbox.fs.upload_file(payload, path)
+ return {"status": "written", "path": path, "bytes": len(payload)}
+
+ def _append(
+ self, sandbox: Any, path: str, content: str, *, binary: bool
+ ) -> dict[str, Any]:
+ chunk = base64.b64decode(content) if binary else content.encode("utf-8")
+ self._ensure_parent_dir(sandbox, path)
+ try:
+ existing: bytes = sandbox.fs.download_file(path)
+ except Exception:
+ existing = b""
+ payload = existing + chunk
+ sandbox.fs.upload_file(payload, path)
+ return {
+ "status": "appended",
+ "path": path,
+ "appended_bytes": len(chunk),
+ "total_bytes": len(payload),
+ }
+
+ @staticmethod
+ def _ensure_parent_dir(sandbox: Any, path: str) -> None:
+ """Make sure the parent directory of `path` exists.
+
+ Daytona's upload returns 400 if the parent directory is missing. We
+ best-effort mkdir the parent; any error (e.g. already exists) is
+ swallowed because `create_folder` is not idempotent on the server.
+ """
+ parent = posixpath.dirname(path)
+ if not parent or parent in ("/", "."):
+ return
+ try:
+ sandbox.fs.create_folder(parent, "0755")
+ except Exception:
+ logger.debug(
+ "Best-effort parent-directory create failed for %s; "
+ "assuming it already exists and proceeding with the write.",
+ parent,
+ exc_info=True,
+ )
+
+ def _list(self, sandbox: Any, path: str) -> dict[str, Any]:
+ entries = sandbox.fs.list_files(path)
+ return {
+ "path": path,
+ "entries": [self._file_info_to_dict(entry) for entry in entries],
+ }
+
+ def _info(self, sandbox: Any, path: str) -> dict[str, Any]:
+ return self._file_info_to_dict(sandbox.fs.get_file_info(path))
+
+ @staticmethod
+ def _file_info_to_dict(info: Any) -> dict[str, Any]:
+ fields = (
+ "name",
+ "size",
+ "mode",
+ "permissions",
+ "is_dir",
+ "mod_time",
+ "owner",
+ "group",
+ )
+ return {field: getattr(info, field, None) for field in fields}
diff --git a/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_python_tool.py b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_python_tool.py
new file mode 100644
index 0000000000..c0bc9d4058
--- /dev/null
+++ b/lib/crewai-tools/src/crewai_tools/tools/daytona_sandbox_tool/daytona_python_tool.py
@@ -0,0 +1,82 @@
+from __future__ import annotations
+
+from builtins import type as type_
+from typing import Any
+
+from pydantic import BaseModel, Field
+
+from crewai_tools.tools.daytona_sandbox_tool.daytona_base_tool import DaytonaBaseTool
+
+
+class DaytonaPythonToolSchema(BaseModel):
+ code: str = Field(
+ ...,
+ description="Python source to execute inside the sandbox.",
+ )
+ argv: list[str] | None = Field(
+ default=None,
+ description="Optional argv passed to the script (forwarded as params.argv).",
+ )
+ env: dict[str, str] | None = Field(
+ default=None,
+ description="Optional environment variables for the run (forwarded as params.env).",
+ )
+ timeout: int | None = Field(
+ default=None,
+ description="Maximum seconds to wait for the code to finish.",
+ )
+
+
+class DaytonaPythonTool(DaytonaBaseTool):
+ """Run Python source inside a Daytona sandbox."""
+
+ name: str = "Daytona Sandbox Python"
+ description: str = (
+ "Execute a block of Python code inside a Daytona sandbox and return the "
+ "exit code, captured stdout, and any produced artifacts. Use this for "
+ "data processing, quick scripts, or analysis that should run in an "
+ "isolated environment."
+ )
+ args_schema: type_[BaseModel] = DaytonaPythonToolSchema
+
+ def _run(
+ self,
+ code: str,
+ argv: list[str] | None = None,
+ env: dict[str, str] | None = None,
+ timeout: int | None = None,
+ ) -> Any:
+ sandbox, should_delete = self._acquire_sandbox()
+ try:
+ params = self._build_code_run_params(argv=argv, env=env)
+ response = sandbox.process.code_run(code, params=params, timeout=timeout)
+ return {
+ "exit_code": getattr(response, "exit_code", None),
+ "result": getattr(response, "result", None),
+ "artifacts": getattr(response, "artifacts", None),
+ }
+ finally:
+ self._release_sandbox(sandbox, should_delete)
+
+ def _build_code_run_params(
+ self,
+ argv: list[str] | None,
+ env: dict[str, str] | None,
+ ) -> Any | None:
+ if argv is None and env is None:
+ return None
+ try:
+ from daytona import CodeRunParams
+ except ImportError as exc:
+ raise ImportError(
+ "Could not import daytona.CodeRunParams while building "
+ "argv/env for sandbox.process.code_run. This usually means the "
+ "installed 'daytona' SDK is too old or incompatible. Upgrade "
+ "with: pip install -U 'crewai-tools[daytona]'"
+ ) from exc
+ kwargs: dict[str, Any] = {}
+ if argv is not None:
+ kwargs["argv"] = argv
+ if env is not None:
+ kwargs["env"] = env
+ return CodeRunParams(**kwargs)
diff --git a/lib/crewai-tools/tool.specs.json b/lib/crewai-tools/tool.specs.json
index a005015031..6bd3747497 100644
--- a/lib/crewai-tools/tool.specs.json
+++ b/lib/crewai-tools/tool.specs.json
@@ -6976,6 +6976,634 @@
"type": "object"
}
},
+ {
+ "description": "Execute a shell command inside a Daytona sandbox and return the exit code and combined output. Use this to run builds, package installs, git operations, or any one-off shell command.",
+ "env_vars": [
+ {
+ "default": null,
+ "description": "API key for Daytona sandbox service",
+ "name": "DAYTONA_API_KEY",
+ "required": false
+ },
+ {
+ "default": null,
+ "description": "Daytona API base URL (optional)",
+ "name": "DAYTONA_API_URL",
+ "required": false
+ },
+ {
+ "default": null,
+ "description": "Daytona target region (optional)",
+ "name": "DAYTONA_TARGET",
+ "required": false
+ }
+ ],
+ "humanized_name": "Daytona Sandbox Exec",
+ "init_params_schema": {
+ "$defs": {
+ "EnvVar": {
+ "properties": {
+ "default": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Default"
+ },
+ "description": {
+ "title": "Description",
+ "type": "string"
+ },
+ "name": {
+ "title": "Name",
+ "type": "string"
+ },
+ "required": {
+ "default": true,
+ "title": "Required",
+ "type": "boolean"
+ }
+ },
+ "required": [
+ "name",
+ "description"
+ ],
+ "title": "EnvVar",
+ "type": "object"
+ }
+ },
+ "description": "Run a shell command inside a Daytona sandbox.",
+ "properties": {
+ "api_key": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "Daytona API key. Falls back to DAYTONA_API_KEY env var.",
+ "required": false,
+ "title": "Api Key"
+ },
+ "api_url": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "Daytona API URL override. Falls back to DAYTONA_API_URL env var.",
+ "required": false,
+ "title": "Api Url"
+ },
+ "create_params": {
+ "anyOf": [
+ {
+ "additionalProperties": true,
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Optional kwargs forwarded to CreateSandboxFromSnapshotParams when creating a sandbox (e.g. language, snapshot, env_vars, labels).",
+ "title": "Create Params"
+ },
+ "persistent": {
+ "default": false,
+ "description": "If True, reuse one sandbox across all calls to this tool instance and delete it at process exit. Default False creates and deletes a fresh sandbox per call.",
+ "title": "Persistent",
+ "type": "boolean"
+ },
+ "sandbox_id": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Attach to an existing sandbox by id or name instead of creating a new one. The tool will never delete a sandbox it did not create.",
+ "title": "Sandbox Id"
+ },
+ "sandbox_timeout": {
+ "default": 60.0,
+ "description": "Timeout in seconds for sandbox create/delete operations.",
+ "title": "Sandbox Timeout",
+ "type": "number"
+ },
+ "target": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "Daytona target region. Falls back to DAYTONA_TARGET env var.",
+ "required": false,
+ "title": "Target"
+ }
+ },
+ "required": [],
+ "title": "DaytonaExecTool",
+ "type": "object"
+ },
+ "name": "DaytonaExecTool",
+ "package_dependencies": [
+ "daytona"
+ ],
+ "run_params_schema": {
+ "properties": {
+ "command": {
+ "description": "Shell command to execute in the sandbox.",
+ "title": "Command",
+ "type": "string"
+ },
+ "cwd": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Working directory to run the command in. Defaults to the sandbox work dir.",
+ "title": "Cwd"
+ },
+ "env": {
+ "anyOf": [
+ {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Optional environment variables to set for this command.",
+ "title": "Env"
+ },
+ "timeout": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Maximum seconds to wait for the command to finish.",
+ "title": "Timeout"
+ }
+ },
+ "required": [
+ "command"
+ ],
+ "title": "DaytonaExecToolSchema",
+ "type": "object"
+ }
+ },
+ {
+ "description": "Perform filesystem operations inside a Daytona sandbox: read a file, write content to a path, append content to an existing file, list a directory, delete a path, make a directory, or fetch file metadata. For files larger than a few KB, create the file with action='write' and empty content, then send the body via multiple 'append' calls of ~4KB each to stay within tool-call payload limits.",
+ "env_vars": [
+ {
+ "default": null,
+ "description": "API key for Daytona sandbox service",
+ "name": "DAYTONA_API_KEY",
+ "required": false
+ },
+ {
+ "default": null,
+ "description": "Daytona API base URL (optional)",
+ "name": "DAYTONA_API_URL",
+ "required": false
+ },
+ {
+ "default": null,
+ "description": "Daytona target region (optional)",
+ "name": "DAYTONA_TARGET",
+ "required": false
+ }
+ ],
+ "humanized_name": "Daytona Sandbox Files",
+ "init_params_schema": {
+ "$defs": {
+ "EnvVar": {
+ "properties": {
+ "default": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Default"
+ },
+ "description": {
+ "title": "Description",
+ "type": "string"
+ },
+ "name": {
+ "title": "Name",
+ "type": "string"
+ },
+ "required": {
+ "default": true,
+ "title": "Required",
+ "type": "boolean"
+ }
+ },
+ "required": [
+ "name",
+ "description"
+ ],
+ "title": "EnvVar",
+ "type": "object"
+ }
+ },
+ "description": "Read, write, and manage files inside a Daytona sandbox.\n\nNotes:\n - Most useful with `persistent=True` or an explicit `sandbox_id`. With the\n default ephemeral mode, files disappear when this tool call finishes.",
+ "properties": {
+ "api_key": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "Daytona API key. Falls back to DAYTONA_API_KEY env var.",
+ "required": false,
+ "title": "Api Key"
+ },
+ "api_url": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "Daytona API URL override. Falls back to DAYTONA_API_URL env var.",
+ "required": false,
+ "title": "Api Url"
+ },
+ "create_params": {
+ "anyOf": [
+ {
+ "additionalProperties": true,
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Optional kwargs forwarded to CreateSandboxFromSnapshotParams when creating a sandbox (e.g. language, snapshot, env_vars, labels).",
+ "title": "Create Params"
+ },
+ "persistent": {
+ "default": false,
+ "description": "If True, reuse one sandbox across all calls to this tool instance and delete it at process exit. Default False creates and deletes a fresh sandbox per call.",
+ "title": "Persistent",
+ "type": "boolean"
+ },
+ "sandbox_id": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Attach to an existing sandbox by id or name instead of creating a new one. The tool will never delete a sandbox it did not create.",
+ "title": "Sandbox Id"
+ },
+ "sandbox_timeout": {
+ "default": 60.0,
+ "description": "Timeout in seconds for sandbox create/delete operations.",
+ "title": "Sandbox Timeout",
+ "type": "number"
+ },
+ "target": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "Daytona target region. Falls back to DAYTONA_TARGET env var.",
+ "required": false,
+ "title": "Target"
+ }
+ },
+ "required": [],
+ "title": "DaytonaFileTool",
+ "type": "object"
+ },
+ "name": "DaytonaFileTool",
+ "package_dependencies": [
+ "daytona"
+ ],
+ "run_params_schema": {
+ "properties": {
+ "action": {
+ "description": "The filesystem action to perform: 'read' (returns file contents), 'write' (create or replace a file with content), 'append' (append content to an existing file \u2014 use this for writing large files in chunks to avoid hitting tool-call size limits), 'list' (lists a directory), 'delete' (removes a file/dir), 'mkdir' (creates a directory), 'info' (returns file metadata).",
+ "enum": [
+ "read",
+ "write",
+ "append",
+ "list",
+ "delete",
+ "mkdir",
+ "info"
+ ],
+ "title": "Action",
+ "type": "string"
+ },
+ "binary": {
+ "default": false,
+ "description": "For 'write': treat content as base64 and upload raw bytes. For 'read': return contents as base64 instead of decoded utf-8.",
+ "title": "Binary",
+ "type": "boolean"
+ },
+ "content": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Content to write or append. If omitted for 'write', an empty file is created. For files larger than a few KB, prefer one 'write' with empty content followed by multiple 'append' calls of ~4KB each to stay within tool-call payload limits.",
+ "title": "Content"
+ },
+ "mode": {
+ "default": "0755",
+ "description": "For action='mkdir': octal permission string (default 0755).",
+ "title": "Mode",
+ "type": "string"
+ },
+ "path": {
+ "description": "Absolute path inside the sandbox.",
+ "title": "Path",
+ "type": "string"
+ },
+ "recursive": {
+ "default": false,
+ "description": "For action='delete': remove directories recursively.",
+ "title": "Recursive",
+ "type": "boolean"
+ }
+ },
+ "required": [
+ "action",
+ "path"
+ ],
+ "title": "DaytonaFileToolSchema",
+ "type": "object"
+ }
+ },
+ {
+ "description": "Execute a block of Python code inside a Daytona sandbox and return the exit code, captured stdout, and any produced artifacts. Use this for data processing, quick scripts, or analysis that should run in an isolated environment.",
+ "env_vars": [
+ {
+ "default": null,
+ "description": "API key for Daytona sandbox service",
+ "name": "DAYTONA_API_KEY",
+ "required": false
+ },
+ {
+ "default": null,
+ "description": "Daytona API base URL (optional)",
+ "name": "DAYTONA_API_URL",
+ "required": false
+ },
+ {
+ "default": null,
+ "description": "Daytona target region (optional)",
+ "name": "DAYTONA_TARGET",
+ "required": false
+ }
+ ],
+ "humanized_name": "Daytona Sandbox Python",
+ "init_params_schema": {
+ "$defs": {
+ "EnvVar": {
+ "properties": {
+ "default": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Default"
+ },
+ "description": {
+ "title": "Description",
+ "type": "string"
+ },
+ "name": {
+ "title": "Name",
+ "type": "string"
+ },
+ "required": {
+ "default": true,
+ "title": "Required",
+ "type": "boolean"
+ }
+ },
+ "required": [
+ "name",
+ "description"
+ ],
+ "title": "EnvVar",
+ "type": "object"
+ }
+ },
+ "description": "Run Python source inside a Daytona sandbox.",
+ "properties": {
+ "api_key": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "Daytona API key. Falls back to DAYTONA_API_KEY env var.",
+ "required": false,
+ "title": "Api Key"
+ },
+ "api_url": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "Daytona API URL override. Falls back to DAYTONA_API_URL env var.",
+ "required": false,
+ "title": "Api Url"
+ },
+ "create_params": {
+ "anyOf": [
+ {
+ "additionalProperties": true,
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Optional kwargs forwarded to CreateSandboxFromSnapshotParams when creating a sandbox (e.g. language, snapshot, env_vars, labels).",
+ "title": "Create Params"
+ },
+ "persistent": {
+ "default": false,
+ "description": "If True, reuse one sandbox across all calls to this tool instance and delete it at process exit. Default False creates and deletes a fresh sandbox per call.",
+ "title": "Persistent",
+ "type": "boolean"
+ },
+ "sandbox_id": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Attach to an existing sandbox by id or name instead of creating a new one. The tool will never delete a sandbox it did not create.",
+ "title": "Sandbox Id"
+ },
+ "sandbox_timeout": {
+ "default": 60.0,
+ "description": "Timeout in seconds for sandbox create/delete operations.",
+ "title": "Sandbox Timeout",
+ "type": "number"
+ },
+ "target": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "Daytona target region. Falls back to DAYTONA_TARGET env var.",
+ "required": false,
+ "title": "Target"
+ }
+ },
+ "required": [],
+ "title": "DaytonaPythonTool",
+ "type": "object"
+ },
+ "name": "DaytonaPythonTool",
+ "package_dependencies": [
+ "daytona"
+ ],
+ "run_params_schema": {
+ "properties": {
+ "argv": {
+ "anyOf": [
+ {
+ "items": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Optional argv passed to the script (forwarded as params.argv).",
+ "title": "Argv"
+ },
+ "code": {
+ "description": "Python source to execute inside the sandbox.",
+ "title": "Code",
+ "type": "string"
+ },
+ "env": {
+ "anyOf": [
+ {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Optional environment variables for the run (forwarded as params.env).",
+ "title": "Env"
+ },
+ "timeout": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Maximum seconds to wait for the code to finish.",
+ "title": "Timeout"
+ }
+ },
+ "required": [
+ "code"
+ ],
+ "title": "DaytonaPythonToolSchema",
+ "type": "object"
+ }
+ },
{
"description": "A tool that can be used to recursively list a directory's content.",
"env_vars": [],
diff --git a/lib/crewai/pyproject.toml b/lib/crewai/pyproject.toml
index e6ab043994..4956c81f0c 100644
--- a/lib/crewai/pyproject.toml
+++ b/lib/crewai/pyproject.toml
@@ -55,7 +55,7 @@ Repository = "https://github.com/crewAIInc/crewAI"
[project.optional-dependencies]
tools = [
- "crewai-tools==1.14.2",
+ "crewai-tools==1.14.3a1",
]
embeddings = [
"tiktoken~=0.8.0"
diff --git a/lib/crewai/src/crewai/__init__.py b/lib/crewai/src/crewai/__init__.py
index b6766741fb..444cb5d070 100644
--- a/lib/crewai/src/crewai/__init__.py
+++ b/lib/crewai/src/crewai/__init__.py
@@ -46,7 +46,7 @@ def filtered_warn(
_suppress_pydantic_deprecation_warnings()
-__version__ = "1.14.2"
+__version__ = "1.14.3a1"
_telemetry_submitted = False
diff --git a/lib/crewai/src/crewai/agent/core.py b/lib/crewai/src/crewai/agent/core.py
index b837523449..74a3e85ded 100644
--- a/lib/crewai/src/crewai/agent/core.py
+++ b/lib/crewai/src/crewai/agent/core.py
@@ -29,7 +29,7 @@
model_validator,
)
from pydantic.functional_serializers import PlainSerializer
-from typing_extensions import Self
+from typing_extensions import Self, TypeIs
from crewai.agent.planning_config import PlanningConfig
from crewai.agent.utils import (
@@ -133,6 +133,13 @@
}
+def _is_resuming_agent_executor(
+ executor: CrewAgentExecutor | AgentExecutor | None,
+) -> TypeIs[AgentExecutor]:
+ """Type guard: True when the executor is resuming from a checkpoint."""
+ return isinstance(executor, AgentExecutor) and executor._resuming
+
+
def _validate_executor_class(value: Any) -> Any:
if isinstance(value, str):
cls = _EXECUTOR_CLASS_MAP.get(value)
@@ -1366,24 +1373,42 @@ def _prepare_kickoff(
prompt, stop_words, rpm_limit_fn = self._build_execution_prompt(raw_tools)
- executor = AgentExecutor(
- llm=cast(BaseLLM, self.llm),
- agent=self,
- prompt=prompt,
- max_iter=self.max_iter,
- tools=parsed_tools,
- tools_names=get_tool_names(parsed_tools),
- stop_words=stop_words,
- tools_description=render_text_description_and_args(parsed_tools),
- tools_handler=self.tools_handler,
- original_tools=raw_tools,
- step_callback=self.step_callback,
- function_calling_llm=self.function_calling_llm,
- respect_context_window=self.respect_context_window,
- request_within_rpm_limit=rpm_limit_fn,
- callbacks=[TokenCalcHandler(self._token_process)],
- response_model=response_format,
- )
+ if _is_resuming_agent_executor(self.agent_executor):
+ executor = self.agent_executor
+ executor.tools = parsed_tools
+ executor.tools_names = get_tool_names(parsed_tools)
+ executor.tools_description = render_text_description_and_args(parsed_tools)
+ executor.original_tools = raw_tools
+ executor.prompt = prompt
+ executor.response_model = response_format
+ executor.stop_words = stop_words
+ executor.tools_handler = self.tools_handler
+ executor.step_callback = self.step_callback
+ executor.function_calling_llm = cast(
+ BaseLLM | None, self.function_calling_llm
+ )
+ executor.respect_context_window = self.respect_context_window
+ executor.request_within_rpm_limit = rpm_limit_fn
+ executor.callbacks = [TokenCalcHandler(self._token_process)]
+ else:
+ executor = AgentExecutor(
+ llm=cast(BaseLLM, self.llm),
+ agent=self,
+ prompt=prompt,
+ max_iter=self.max_iter,
+ tools=parsed_tools,
+ tools_names=get_tool_names(parsed_tools),
+ stop_words=stop_words,
+ tools_description=render_text_description_and_args(parsed_tools),
+ tools_handler=self.tools_handler,
+ original_tools=raw_tools,
+ step_callback=self.step_callback,
+ function_calling_llm=self.function_calling_llm,
+ respect_context_window=self.respect_context_window,
+ request_within_rpm_limit=rpm_limit_fn,
+ callbacks=[TokenCalcHandler(self._token_process)],
+ response_model=response_format,
+ )
all_files: dict[str, Any] = {}
if isinstance(messages, str):
@@ -1504,14 +1529,17 @@ def kickoff(
)
try:
- crewai_event_bus.emit(
- self,
- event=LiteAgentExecutionStartedEvent(
+ if self.checkpoint_kickoff_event_id is not None:
+ self._kickoff_event_id = self.checkpoint_kickoff_event_id
+ self.checkpoint_kickoff_event_id = None
+ else:
+ started_event = LiteAgentExecutionStartedEvent(
agent_info=agent_info,
tools=parsed_tools,
messages=messages,
- ),
- )
+ )
+ crewai_event_bus.emit(self, event=started_event)
+ self._kickoff_event_id = started_event.event_id
output = self._execute_and_build_output(executor, inputs, response_format)
return self._finalize_kickoff(
@@ -1808,14 +1836,17 @@ async def kickoff_async(
)
try:
- crewai_event_bus.emit(
- self,
- event=LiteAgentExecutionStartedEvent(
+ if self.checkpoint_kickoff_event_id is not None:
+ self._kickoff_event_id = self.checkpoint_kickoff_event_id
+ self.checkpoint_kickoff_event_id = None
+ else:
+ started_event = LiteAgentExecutionStartedEvent(
agent_info=agent_info,
tools=parsed_tools,
messages=messages,
- ),
- )
+ )
+ crewai_event_bus.emit(self, event=started_event)
+ self._kickoff_event_id = started_event.event_id
output = await self._execute_and_build_output_async(
executor, inputs, response_format
diff --git a/lib/crewai/src/crewai/agents/agent_builder/base_agent.py b/lib/crewai/src/crewai/agents/agent_builder/base_agent.py
index a00f9b49f2..74d30e0b2a 100644
--- a/lib/crewai/src/crewai/agents/agent_builder/base_agent.py
+++ b/lib/crewai/src/crewai/agents/agent_builder/base_agent.py
@@ -28,6 +28,9 @@
from crewai.agents.agent_builder.utilities.base_token_process import TokenProcess
from crewai.agents.cache.cache_handler import CacheHandler
from crewai.agents.tools_handler import ToolsHandler
+from crewai.events.base_events import set_emission_counter
+from crewai.events.event_bus import crewai_event_bus
+from crewai.events.event_context import restore_event_scope, set_last_event_id
from crewai.knowledge.knowledge import Knowledge
from crewai.knowledge.knowledge_config import KnowledgeConfig
from crewai.knowledge.source.base_knowledge_source import BaseKnowledgeSource
@@ -51,6 +54,7 @@
if TYPE_CHECKING:
from crewai.context import ExecutionContext
from crewai.crew import Crew
+ from crewai.state.runtime import RuntimeState
def _validate_crew_ref(value: Any) -> Any:
@@ -219,6 +223,7 @@ class BaseAgent(BaseModel, ABC, metaclass=AgentMeta):
_original_goal: str | None = PrivateAttr(default=None)
_original_backstory: str | None = PrivateAttr(default=None)
_token_process: TokenProcess = PrivateAttr(default_factory=TokenProcess)
+ _kickoff_event_id: str | None = PrivateAttr(default=None)
id: UUID4 = Field(default_factory=uuid.uuid4, frozen=True)
role: str = Field(description="Role of the agent")
goal: str = Field(description="Objective of the agent")
@@ -335,30 +340,90 @@ def _validate_agent_executor(cls, v: Any) -> Any:
min_length=1,
)
execution_context: ExecutionContext | None = Field(default=None)
+ checkpoint_kickoff_event_id: str | None = Field(default=None)
@classmethod
def from_checkpoint(cls, config: CheckpointConfig) -> Self:
- """Restore an Agent from a checkpoint.
+ """Restore an Agent from a checkpoint, ready to resume via kickoff().
Args:
- config: Checkpoint configuration with ``restore_from`` set.
+ config: Checkpoint configuration with ``restore_from`` set to
+ the path of the checkpoint to load.
+
+ Returns:
+ An Agent instance. Call kickoff() to resume execution.
"""
from crewai.context import apply_execution_context
from crewai.state.runtime import RuntimeState
state = RuntimeState.from_checkpoint(config, context={"from_checkpoint": True})
+ crewai_event_bus.set_runtime_state(state)
for entity in state.root:
if isinstance(entity, cls):
if entity.execution_context is not None:
apply_execution_context(entity.execution_context)
- if entity.agent_executor is not None:
- entity.agent_executor.agent = entity
- entity.agent_executor._resuming = True
+ entity._restore_runtime(state)
return entity
raise ValueError(
f"No {cls.__name__} found in checkpoint: {config.restore_from}"
)
+ @classmethod
+ def fork(cls, config: CheckpointConfig, branch: str | None = None) -> Self:
+ """Fork an Agent from a checkpoint, creating a new execution branch.
+
+ Args:
+ config: Checkpoint configuration with ``restore_from`` set.
+ branch: Branch label for the fork. Auto-generated if not provided.
+
+ Returns:
+ An Agent instance on the new branch. Call kickoff() to run.
+ """
+ agent = cls.from_checkpoint(config)
+ state = crewai_event_bus._runtime_state
+ if state is None:
+ raise RuntimeError("Cannot fork: no runtime state on the event bus.")
+ state.fork(branch)
+ return agent
+
+ def _restore_runtime(self, state: RuntimeState) -> None:
+ """Re-create runtime objects after restoring from a checkpoint.
+
+ Args:
+ state: The RuntimeState containing the event record.
+ """
+ if self.agent_executor is not None:
+ self.agent_executor.agent = self
+ self.agent_executor._resuming = True
+ if self.checkpoint_kickoff_event_id is not None:
+ self._kickoff_event_id = self.checkpoint_kickoff_event_id
+ self._restore_event_scope(state)
+
+ def _restore_event_scope(self, state: RuntimeState) -> None:
+ """Rebuild the event scope stack from the checkpoint's event record.
+
+ Args:
+ state: The RuntimeState containing the event record.
+ """
+ stack: list[tuple[str, str]] = []
+ kickoff_id = self._kickoff_event_id
+ if kickoff_id:
+ stack.append((kickoff_id, "lite_agent_execution_started"))
+
+ restore_event_scope(tuple(stack))
+
+ last_event_id: str | None = None
+ max_seq = 0
+ for node in state.event_record.nodes.values():
+ seq = node.event.emission_sequence or 0
+ if seq > max_seq:
+ max_seq = seq
+ last_event_id = node.event.event_id
+ if last_event_id is not None:
+ set_last_event_id(last_event_id)
+ if max_seq > 0:
+ set_emission_counter(max_seq)
+
@model_validator(mode="before")
@classmethod
def process_model_config(cls, values: Any) -> dict[str, Any]:
diff --git a/lib/crewai/src/crewai/cli/checkpoint_cli.py b/lib/crewai/src/crewai/cli/checkpoint_cli.py
index 5e8572c628..0b3139d7d7 100644
--- a/lib/crewai/src/crewai/cli/checkpoint_cli.py
+++ b/lib/crewai/src/crewai/cli/checkpoint_cli.py
@@ -472,6 +472,8 @@ def _entity_type_from_meta(meta: dict[str, Any]) -> str:
for ent in meta.get("entities", []):
if ent.get("type") == "flow":
return "flow"
+ if ent.get("type") == "agent":
+ return "agent"
return "crew"
@@ -505,6 +507,11 @@ def resume_checkpoint(location: str, checkpoint_id: str | None) -> None:
flow = Flow.from_checkpoint(config)
result = asyncio.run(flow.kickoff_async(inputs=inputs))
+ elif entity_type == "agent":
+ from crewai.agent import Agent
+
+ agent = Agent.from_checkpoint(config)
+ result = asyncio.run(agent.akickoff(messages="Resume execution."))
else:
from crewai.crew import Crew
diff --git a/lib/crewai/src/crewai/cli/checkpoint_tui.py b/lib/crewai/src/crewai/cli/checkpoint_tui.py
index 0f945dd317..7cc1d6867e 100644
--- a/lib/crewai/src/crewai/cli/checkpoint_tui.py
+++ b/lib/crewai/src/crewai/cli/checkpoint_tui.py
@@ -123,7 +123,7 @@ def _entity_icon(etype: str) -> str:
str,
dict[str, Any] | None,
dict[int, str] | None,
- Literal["crew", "flow"],
+ Literal["crew", "flow", "agent"],
]
| None
)
@@ -682,10 +682,14 @@ def _collect_task_overrides(self) -> dict[int, str] | None:
overrides[task_idx] = editor.text
return overrides or None
- def _detect_entity_type(self, entry: dict[str, Any]) -> Literal["crew", "flow"]:
+ def _detect_entity_type(
+ self, entry: dict[str, Any]
+ ) -> Literal["crew", "flow", "agent"]:
for ent in entry.get("entities", []):
if ent.get("type") == "flow":
return "flow"
+ if ent.get("type") == "agent":
+ return "agent"
return "crew"
def _resolve_location(self, entry: dict[str, Any]) -> str:
@@ -829,6 +833,21 @@ async def _run_checkpoint_tui_async(location: str) -> None:
click.echo(f"\nResult: {getattr(result, 'raw', result)}")
return
+ if entity_type == "agent":
+ from crewai.agent import Agent
+
+ if action == "fork":
+ click.echo(f"\nForking agent from: {selected}\n")
+ agent = Agent.fork(config)
+ else:
+ click.echo(f"\nResuming agent from: {selected}\n")
+ agent = Agent.from_checkpoint(config)
+
+ click.echo()
+ result = await agent.akickoff(messages="Resume execution.")
+ click.echo(f"\nResult: {getattr(result, 'raw', result)}")
+ return
+
from crewai.crew import Crew
if action == "fork":
diff --git a/lib/crewai/src/crewai/cli/templates/crew/pyproject.toml b/lib/crewai/src/crewai/cli/templates/crew/pyproject.toml
index c9bddf9754..bf87e3131c 100644
--- a/lib/crewai/src/crewai/cli/templates/crew/pyproject.toml
+++ b/lib/crewai/src/crewai/cli/templates/crew/pyproject.toml
@@ -5,7 +5,7 @@ description = "{{name}} using crewAI"
authors = [{ name = "Your Name", email = "you@example.com" }]
requires-python = ">=3.10,<3.14"
dependencies = [
- "crewai[tools]==1.14.2"
+ "crewai[tools]==1.14.3a1"
]
[project.scripts]
diff --git a/lib/crewai/src/crewai/cli/templates/flow/pyproject.toml b/lib/crewai/src/crewai/cli/templates/flow/pyproject.toml
index ddb2750e91..bdd4fd2a44 100644
--- a/lib/crewai/src/crewai/cli/templates/flow/pyproject.toml
+++ b/lib/crewai/src/crewai/cli/templates/flow/pyproject.toml
@@ -5,7 +5,7 @@ description = "{{name}} using crewAI"
authors = [{ name = "Your Name", email = "you@example.com" }]
requires-python = ">=3.10,<3.14"
dependencies = [
- "crewai[tools]==1.14.2"
+ "crewai[tools]==1.14.3a1"
]
[project.scripts]
diff --git a/lib/crewai/src/crewai/cli/templates/tool/pyproject.toml b/lib/crewai/src/crewai/cli/templates/tool/pyproject.toml
index a84300df43..fcd42c81f2 100644
--- a/lib/crewai/src/crewai/cli/templates/tool/pyproject.toml
+++ b/lib/crewai/src/crewai/cli/templates/tool/pyproject.toml
@@ -5,7 +5,7 @@ description = "Power up your crews with {{folder_name}}"
readme = "README.md"
requires-python = ">=3.10,<3.14"
dependencies = [
- "crewai[tools]==1.14.2"
+ "crewai[tools]==1.14.3a1"
]
[tool.crewai]
diff --git a/lib/crewai/src/crewai/llms/providers/gemini/completion.py b/lib/crewai/src/crewai/llms/providers/gemini/completion.py
index 1b2fb26cb2..f7fd0f61ed 100644
--- a/lib/crewai/src/crewai/llms/providers/gemini/completion.py
+++ b/lib/crewai/src/crewai/llms/providers/gemini/completion.py
@@ -976,6 +976,7 @@ def _process_stream_chunk(
"id": call_id,
"name": part.function_call.name,
"args": args_dict,
+ "raw_part": part,
}
self._emit_stream_chunk_event(
@@ -1060,29 +1061,20 @@ def _finalize_streaming_response(
if call_data.get("name") != STRUCTURED_OUTPUT_TOOL_NAME
}
- # If there are function calls but no available_functions,
- # return them for the executor to handle
if non_structured_output_calls and not available_functions:
- formatted_function_calls = [
- {
- "id": call_data["id"],
- "function": {
- "name": call_data["name"],
- "arguments": json.dumps(call_data["args"]),
- },
- "type": "function",
- }
+ raw_parts = [
+ call_data["raw_part"]
for call_data in non_structured_output_calls.values()
]
self._emit_call_completed_event(
- response=formatted_function_calls,
+ response=raw_parts,
call_type=LLMCallType.TOOL_CALL,
from_task=from_task,
from_agent=from_agent,
messages=self._convert_contents_to_dict(contents),
usage=usage_data,
)
- return formatted_function_calls
+ return raw_parts
# Handle completed function calls (excluding structured_output)
if non_structured_output_calls and available_functions:
diff --git a/lib/crewai/src/crewai/state/runtime.py b/lib/crewai/src/crewai/state/runtime.py
index daae0620e5..3243d4c198 100644
--- a/lib/crewai/src/crewai/state/runtime.py
+++ b/lib/crewai/src/crewai/state/runtime.py
@@ -44,9 +44,12 @@ def _sync_checkpoint_fields(entity: object) -> None:
entity: The entity whose private runtime attributes will be
copied into its public checkpoint fields.
"""
+ from crewai.agents.agent_builder.base_agent import BaseAgent
from crewai.crew import Crew
from crewai.flow.flow import Flow
+ if isinstance(entity, BaseAgent):
+ entity.checkpoint_kickoff_event_id = entity._kickoff_event_id
if isinstance(entity, Flow):
entity.checkpoint_completed_methods = (
set(entity._completed_methods) if entity._completed_methods else None
diff --git a/lib/crewai/src/crewai/task.py b/lib/crewai/src/crewai/task.py
index e12caa2af5..1cac87cb89 100644
--- a/lib/crewai/src/crewai/task.py
+++ b/lib/crewai/src/crewai/task.py
@@ -1241,12 +1241,26 @@ def _invoke_guardrail_function(
tools=tools,
)
- pydantic_output, json_output = self._export_output(result)
+ if isinstance(result, BaseModel):
+ raw = result.model_dump_json()
+ if self.output_pydantic:
+ pydantic_output = result
+ json_output = None
+ elif self.output_json:
+ pydantic_output = None
+ json_output = result.model_dump()
+ else:
+ pydantic_output = None
+ json_output = None
+ else:
+ raw = result
+ pydantic_output, json_output = self._export_output(result)
+
task_output = TaskOutput(
name=self.name or self.description,
description=self.description,
expected_output=self.expected_output,
- raw=result,
+ raw=raw,
pydantic=pydantic_output,
json_dict=json_output,
agent=agent.role,
@@ -1337,12 +1351,26 @@ async def _ainvoke_guardrail_function(
tools=tools,
)
- pydantic_output, json_output = self._export_output(result)
+ if isinstance(result, BaseModel):
+ raw = result.model_dump_json()
+ if self.output_pydantic:
+ pydantic_output = result
+ json_output = None
+ elif self.output_json:
+ pydantic_output = None
+ json_output = result.model_dump()
+ else:
+ pydantic_output = None
+ json_output = None
+ else:
+ raw = result
+ pydantic_output, json_output = self._export_output(result)
+
task_output = TaskOutput(
name=self.name or self.description,
description=self.description,
expected_output=self.expected_output,
- raw=result,
+ raw=raw,
pydantic=pydantic_output,
json_dict=json_output,
agent=agent.role,
diff --git a/lib/crewai/tests/test_checkpoint.py b/lib/crewai/tests/test_checkpoint.py
index b1ad9e2df4..d92a24803c 100644
--- a/lib/crewai/tests/test_checkpoint.py
+++ b/lib/crewai/tests/test_checkpoint.py
@@ -562,3 +562,75 @@ def test_flow_kickoff_delegates_to_from_checkpoint(self) -> None:
)
assert mock_restored.checkpoint.restore_from is None
assert result == "flow_result"
+
+
+# ---------- Agent checkpoint/fork ----------
+
+
+class TestAgentCheckpoint:
+ def _make_agent_state(self) -> RuntimeState:
+ agent = Agent(role="r", goal="g", backstory="b", llm="gpt-4o-mini")
+ return RuntimeState(root=[agent])
+
+ def test_agent_from_checkpoint_sets_runtime_state(self) -> None:
+ state = self._make_agent_state()
+ state._provider = JsonProvider()
+ with tempfile.TemporaryDirectory() as d:
+ loc = state.checkpoint(d)
+ cfg = CheckpointConfig(restore_from=loc)
+
+ from crewai.events.event_bus import crewai_event_bus
+
+ crewai_event_bus._runtime_state = None
+ Agent.from_checkpoint(cfg)
+ assert crewai_event_bus._runtime_state is not None
+
+ def test_agent_fork_sets_branch(self) -> None:
+ state = self._make_agent_state()
+ state._provider = JsonProvider()
+ with tempfile.TemporaryDirectory() as d:
+ loc = state.checkpoint(d)
+ cfg = CheckpointConfig(restore_from=loc)
+
+ from crewai.events.event_bus import crewai_event_bus
+
+ Agent.fork(cfg, branch="agent-experiment")
+ rt = crewai_event_bus._runtime_state
+ assert rt is not None
+ assert rt._branch == "agent-experiment"
+
+ def test_agent_fork_auto_branch(self) -> None:
+ state = self._make_agent_state()
+ state._provider = JsonProvider()
+ with tempfile.TemporaryDirectory() as d:
+ loc = state.checkpoint(d)
+ cfg = CheckpointConfig(restore_from=loc)
+
+ from crewai.events.event_bus import crewai_event_bus
+
+ Agent.fork(cfg)
+ rt = crewai_event_bus._runtime_state
+ assert rt is not None
+ assert rt._branch.startswith("fork/")
+
+ def test_sync_checkpoint_fields_agent(self) -> None:
+ from crewai.state.runtime import _sync_checkpoint_fields
+
+ agent = Agent(role="r", goal="g", backstory="b", llm="gpt-4o-mini")
+ agent._kickoff_event_id = "evt-123"
+ _sync_checkpoint_fields(agent)
+ assert agent.checkpoint_kickoff_event_id == "evt-123"
+
+ def test_agent_restore_kickoff_event_id(self) -> None:
+ agent = Agent(role="r", goal="g", backstory="b", llm="gpt-4o-mini")
+ agent._kickoff_event_id = "evt-456"
+ state = RuntimeState(root=[agent])
+ state._provider = JsonProvider()
+ with tempfile.TemporaryDirectory() as d:
+ from crewai.state.runtime import _prepare_entities
+
+ _prepare_entities(state.root)
+ loc = state.checkpoint(d)
+ cfg = CheckpointConfig(restore_from=loc)
+ restored = Agent.from_checkpoint(cfg)
+ assert restored._kickoff_event_id == "evt-456"
diff --git a/lib/devtools/src/crewai_devtools/__init__.py b/lib/devtools/src/crewai_devtools/__init__.py
index 9c20314911..9df2b583d5 100644
--- a/lib/devtools/src/crewai_devtools/__init__.py
+++ b/lib/devtools/src/crewai_devtools/__init__.py
@@ -1,3 +1,3 @@
"""CrewAI development tools."""
-__version__ = "1.14.2"
+__version__ = "1.14.3a1"
diff --git a/uv.lock b/uv.lock
index ad1bf72761..7451cfa170 100644
--- a/uv.lock
+++ b/uv.lock
@@ -13,7 +13,7 @@ resolution-markers = [
]
[options]
-exclude-newer = "2026-04-17T16:00:00Z"
+exclude-newer = "2026-04-18T07:00:00Z"
[manifest]
members = [
@@ -240,6 +240,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c0/1b/ac685a8882896acf0f6b31d689e3792199cfe7aba37969fa91da63a7fa27/aiohttp-3.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:69f571de7500e0557801c0b51f4780482c0ec5fe2ac851af5a92cfce1af1cb83", size = 458896, upload-time = "2026-03-31T21:58:48.119Z" },
]
+[[package]]
+name = "aiohttp-retry"
+version = "2.9.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9d/61/ebda4d8e3d8cfa1fd3db0fb428db2dd7461d5742cea35178277ad180b033/aiohttp_retry-2.9.1.tar.gz", hash = "sha256:8eb75e904ed4ee5c2ec242fefe85bf04240f685391c4879d8f541d6028ff01f1", size = 13608, upload-time = "2024-11-06T10:44:54.574Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1a/99/84ba7273339d0f3dfa57901b846489d2e5c2cd731470167757f1935fffbd/aiohttp_retry-2.9.1-py3-none-any.whl", hash = "sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54", size = 9981, upload-time = "2024-11-06T10:44:52.917Z" },
+]
+
[[package]]
name = "aioitertools"
version = "0.13.0"
@@ -1474,6 +1486,9 @@ couchbase = [
databricks-sdk = [
{ name = "databricks-sdk" },
]
+daytona = [
+ { name = "daytona" },
+]
exa-py = [
{ name = "exa-py" },
]
@@ -1574,6 +1589,7 @@ requires-dist = [
{ name = "crewai", editable = "lib/crewai" },
{ name = "cryptography", marker = "extra == 'snowflake'", specifier = ">=43.0.3" },
{ name = "databricks-sdk", marker = "extra == 'databricks-sdk'", specifier = ">=0.46.0" },
+ { name = "daytona", marker = "extra == 'daytona'", specifier = "~=0.140.0" },
{ name = "exa-py", marker = "extra == 'exa-py'", specifier = ">=1.8.7" },
{ name = "firecrawl-py", marker = "extra == 'firecrawl-py'", specifier = ">=1.8.0" },
{ name = "gitpython", marker = "extra == 'github'", specifier = ">=3.1.41,<4" },
@@ -1616,7 +1632,7 @@ requires-dist = [
{ name = "weaviate-client", marker = "extra == 'weaviate-client'", specifier = ">=4.10.2" },
{ name = "youtube-transcript-api", specifier = "~=1.2.2" },
]
-provides-extras = ["apify", "beautifulsoup4", "bedrock", "browserbase", "composio-core", "contextual", "couchbase", "databricks-sdk", "exa-py", "firecrawl-py", "github", "hyperbrowser", "linkup-sdk", "mcp", "mongodb", "multion", "mysql", "oxylabs", "patronus", "postgresql", "qdrant-client", "rag", "scrapegraph-py", "scrapfly-sdk", "selenium", "serpapi", "singlestore", "snowflake", "spider-client", "sqlalchemy", "stagehand", "tavily-python", "weaviate-client", "xml"]
+provides-extras = ["apify", "beautifulsoup4", "bedrock", "browserbase", "composio-core", "contextual", "couchbase", "databricks-sdk", "daytona", "exa-py", "firecrawl-py", "github", "hyperbrowser", "linkup-sdk", "mcp", "mongodb", "multion", "mysql", "oxylabs", "patronus", "postgresql", "qdrant-client", "rag", "scrapegraph-py", "scrapfly-sdk", "selenium", "serpapi", "singlestore", "snowflake", "spider-client", "sqlalchemy", "stagehand", "tavily-python", "weaviate-client", "xml"]
[[package]]
name = "cryptography"
@@ -1784,6 +1800,94 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686, upload-time = "2024-06-09T16:20:16.715Z" },
]
+[[package]]
+name = "daytona"
+version = "0.140.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiofiles" },
+ { name = "daytona-api-client" },
+ { name = "daytona-api-client-async" },
+ { name = "daytona-toolbox-api-client" },
+ { name = "daytona-toolbox-api-client-async" },
+ { name = "deprecated" },
+ { name = "environs" },
+ { name = "httpx" },
+ { name = "multipart" },
+ { name = "obstore" },
+ { name = "pydantic" },
+ { name = "toml" },
+ { name = "websockets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/31/d4/4362b885f461ca2849f873c98e08594acb89d80ab82644ac88cdb4b7f8e9/daytona-0.140.0.tar.gz", hash = "sha256:8fa6dcc28ec735a9255d02cd98350b819fcf83daab866e688f659760c22bbfbf", size = 121616, upload-time = "2026-02-10T12:20:34.299Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9e/18/531ec599ff19adc9561ebfc5bdc5e5483fbb47e00d392376e69a259ed384/daytona-0.140.0-py3-none-any.whl", hash = "sha256:93a85d2c76e7e3dccbd708784026a61cd977ebfde37ed0777966c2e702918662", size = 150607, upload-time = "2026-02-10T12:20:32.889Z" },
+]
+
+[[package]]
+name = "daytona-api-client"
+version = "0.140.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "python-dateutil" },
+ { name = "typing-extensions" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e4/7e/64676a69f357be5a32154240c89d145090d76c6706652e50137997f2fcab/daytona_api_client-0.140.0.tar.gz", hash = "sha256:ed28b3337189393d2766697c98d1b764dea4fda82359040e6f8d111f5d073aef", size = 134360, upload-time = "2026-02-10T12:19:35.791Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/11/79/17fd48a00c5aea1386f46a232f8af03014ec827c7c6ea46a2e192cddedbd/daytona_api_client-0.140.0-py3-none-any.whl", hash = "sha256:6a0ba0b4483da23f6557e18350de292b727a663874fd82aac3ae21a444d55215", size = 375797, upload-time = "2026-02-10T12:19:33.987Z" },
+]
+
+[[package]]
+name = "daytona-api-client-async"
+version = "0.140.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "aiohttp-retry" },
+ { name = "pydantic" },
+ { name = "python-dateutil" },
+ { name = "typing-extensions" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e3/e3/f5dcfa17f02988899427d1b898f6176922787b8cb361e0a42d962ca319b2/daytona_api_client_async-0.140.0.tar.gz", hash = "sha256:dc6c7126649162bbe31e3da665b421165f52407d34598f8ec89617650456949e", size = 134486, upload-time = "2026-02-10T12:19:50.396Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1b/74/0a13a70d19756da1987369820d6bac0c704cffdc684b0e237ccbabf8ffb0/daytona_api_client_async-0.140.0-py3-none-any.whl", hash = "sha256:404ea5492714f6f82d2afbaaa722b87e5f2f9d419dfd28ec37c0a1edad408fb1", size = 378645, upload-time = "2026-02-10T12:19:48.434Z" },
+]
+
+[[package]]
+name = "daytona-toolbox-api-client"
+version = "0.140.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "python-dateutil" },
+ { name = "typing-extensions" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7b/f1/b04957487ef7b6de4a45ba5348123f6b8ed18325fa6e5bf3eea71c0a387d/daytona_toolbox_api_client-0.140.0.tar.gz", hash = "sha256:b7421327fd5f45168ab5d1579cfdceae55356fb3da5939d13d9087ae49f79945", size = 64094, upload-time = "2026-02-10T12:19:40.882Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b2/d5/08136d47cfec7199081f6a6ccf8e19992425bff091a9c97fdf6872de8a40/daytona_toolbox_api_client-0.140.0-py3-none-any.whl", hash = "sha256:4d71842b461e2a3123e563475964ddda78884d012286d950c9d947a0d2779d07", size = 171059, upload-time = "2026-02-10T12:19:39.107Z" },
+]
+
+[[package]]
+name = "daytona-toolbox-api-client-async"
+version = "0.140.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "aiohttp-retry" },
+ { name = "pydantic" },
+ { name = "python-dateutil" },
+ { name = "typing-extensions" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a6/52/2a1b5fe303f4ea116ade0fe09dd85eba349a67318b83c74f7d2808a42905/daytona_toolbox_api_client_async-0.140.0.tar.gz", hash = "sha256:62a4b51404db28e95e18da836c8de0d2b67192d42027bc3c9273937d3066612b", size = 61090, upload-time = "2026-02-10T12:20:02.273Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9c/ca/0113aba439cad635a1ecaf4ac50c9a8248002d529b2c44d02f80ec08f503/daytona_toolbox_api_client_async-0.140.0-py3-none-any.whl", hash = "sha256:dddf18320449234ed62ce8d051f470ecaac0f56bf23e800c0bf51b11b5251d17", size = 172380, upload-time = "2026-02-10T12:20:01.005Z" },
+]
+
[[package]]
name = "decli"
version = "0.6.3"
@@ -2046,6 +2150,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/5e/4b5aaaabddfacfe36ba7768817bd1f71a7a810a43705e531f3ae4c690767/emoji-2.15.0-py3-none-any.whl", hash = "sha256:205296793d66a89d88af4688fa57fd6496732eb48917a87175a023c8138995eb", size = 608433, upload-time = "2025-09-21T12:13:01.197Z" },
]
+[[package]]
+name = "environs"
+version = "14.6.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "marshmallow" },
+ { name = "python-dotenv" },
+ { name = "typing-extensions", marker = "python_full_version < '3.11'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fb/c7/94f97e6e74482a50b5fc798856b6cc06e8d072ab05a0b74cb5d87bd0d065/environs-14.6.0.tar.gz", hash = "sha256:ed2767588deb503209ffe4dd9bb2b39311c2e4e7e27ce2c64bf62ca83328d068", size = 35563, upload-time = "2026-02-20T04:02:08.869Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/97/a8/c070e1340636acb38d4e6a7e45c46d168a462b48b9b3257e14ca0e5af79b/environs-14.6.0-py3-none-any.whl", hash = "sha256:f8fb3d6c6a55872b0c6db077a28f5a8c7b8984b7c32029613d44cef95cfc0812", size = 17205, upload-time = "2026-02-20T04:02:07.299Z" },
+]
+
[[package]]
name = "et-xmlfile"
version = "2.0.0"
@@ -4480,6 +4598,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/56/9e/b7f6b33222978688afc613e25e73776076e996cb5e545e37af8e373d3b3c/multion-1.1.0-py3-none-any.whl", hash = "sha256:6a4ffa2d71c5667e41492993e7136fa71eb4b52f0c11914f3a737ffd543195ca", size = 39968, upload-time = "2024-04-25T03:43:12.22Z" },
]
+[[package]]
+name = "multipart"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8e/d6/9c4f366d6f9bb8f8fb5eae3acac471335c39510c42b537fd515213d7d8c3/multipart-1.3.1.tar.gz", hash = "sha256:211d7cfc1a7a43e75c4d24ee0e8e0f4f61d522f1a21575303ae85333dea687bf", size = 38929, upload-time = "2026-02-27T10:17:13.7Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/19/ed/e1f03200ee1f0bf4a2b9b72709afefbf5319b68df654e0b84b35c65613ee/multipart-1.3.1-py3-none-any.whl", hash = "sha256:a82b59e1befe74d3d30b3d3f70efd5a2eba4d938f845dcff9faace968888ff29", size = 15061, upload-time = "2026-02-27T10:17:11.943Z" },
+]
+
[[package]]
name = "multiprocess"
version = "0.70.19"
@@ -4945,6 +5072,81 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065, upload-time = "2025-06-19T22:48:06.508Z" },
]
+[[package]]
+name = "obstore"
+version = "0.8.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a3/8c/9ec984edd0f3b72226adfaa19b1c61b15823b35b52f311ca4af36d009d15/obstore-0.8.2.tar.gz", hash = "sha256:a467bc4e97169e2ba749981b4fd0936015428d9b8f3fb83a5528536b1b6f377f", size = 168852, upload-time = "2025-09-16T15:34:55.786Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e1/e9/0a1e340ef262f225ad71f556ccba257896f85ca197f02cd228fe5e20b45a/obstore-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:49104c0d72688c180af015b02c691fbb6cf6a45b03a9d71b84059ed92dbec704", size = 3622821, upload-time = "2025-09-16T15:32:53.79Z" },
+ { url = "https://files.pythonhosted.org/packages/24/86/2b53e8b0a838dbbf89ef5dfddde888770bc1a993c691698dae411a407228/obstore-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c49776abd416e4d80d003213522d82ad48ed3517bee27a6cf8ce0f0cf4e6337e", size = 3356349, upload-time = "2025-09-16T15:32:55.715Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/79/1ba6dc854d7de7704a2c474d723ffeb01b6884f72eea7cbe128efc472f4a/obstore-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1636372b5e171a98369612d122ea20b955661daafa6519ed8322f4f0cb43ff74", size = 3454842, upload-time = "2025-09-16T15:32:57.072Z" },
+ { url = "https://files.pythonhosted.org/packages/ca/03/ca67ccc9b9e63cfc0cd069b84437807fed4ef880be1e445b3f29d11518e0/obstore-0.8.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2efed0d86ad4ebffcbe3d0c4d84f26c2c6b20287484a0a748499c169a8e1f2c4", size = 3688363, upload-time = "2025-09-16T15:32:58.164Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/2f/c78eb4352d8be64a072934fe3ff2af79a1d06f4571af7c70d96f9741766b/obstore-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00c5542616dc5608de82ab6f6820633c9dbab6ff048e770fb8a5fcd1d30cd656", size = 3960133, upload-time = "2025-09-16T15:32:59.614Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/34/9e828d19194e227fd9f1d2dd70710da99c2bd2cd728686d59ea80be10b7c/obstore-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9df46aaf25ce80fff48c53382572adc67b6410611660b798024450281a3129", size = 3925493, upload-time = "2025-09-16T15:33:00.923Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/7d/9ec5967f3e2915fbc441f72c3892a7f0fb3618e3ae5c8a44181ce4aa641c/obstore-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ccf0f03a7fe453fb8640611c922bce19f021c6aaeee6ee44d6d8fb57db6be48", size = 3769401, upload-time = "2025-09-16T15:33:02.373Z" },
+ { url = "https://files.pythonhosted.org/packages/85/bf/00b65013068bde630a7369610a2dae4579315cd6ce82d30e3d23315cf308/obstore-0.8.2-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:ddfbfadc88c5e9740b687ef0833384329a56cea07b34f44e1c4b00a0e97d94a9", size = 3534383, upload-time = "2025-09-16T15:33:03.903Z" },
+ { url = "https://files.pythonhosted.org/packages/52/39/1b684fd96c9a33974fc52f417c52b42c1d50df40b44e588853c4a14d9ab1/obstore-0.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:53ad53bb16e64102f39559ec470efd78a5272b5e3b84c53aa0423993ac5575c1", size = 3697939, upload-time = "2025-09-16T15:33:05.355Z" },
+ { url = "https://files.pythonhosted.org/packages/85/58/93a2c78935f17fde7e22842598a6373e46a9c32d0243ec3b26b5da92df27/obstore-0.8.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:b0b905b46354db0961ab818cad762b9c1ac154333ae5d341934c90635a6bd7ab", size = 3681746, upload-time = "2025-09-16T15:33:09.344Z" },
+ { url = "https://files.pythonhosted.org/packages/38/90/225c2972338d18f92e7a56f71e34df6935b0b1bd7458bb6a0d2bd4d48f92/obstore-0.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fee235694406ebb2dc4178752cf5587f471d6662659b082e9786c716a0a9465c", size = 3765156, upload-time = "2025-09-16T15:33:10.457Z" },
+ { url = "https://files.pythonhosted.org/packages/79/eb/aca27e895bfcbbcd2bf05ea6a2538a94b718e6f6d72986e16ab158b753ec/obstore-0.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6c36faf7ace17dd0832aa454118a63ea21862e3d34f71b9297d0c788d00f4985", size = 3941190, upload-time = "2025-09-16T15:33:11.59Z" },
+ { url = "https://files.pythonhosted.org/packages/33/ce/c8251a397e7507521768f05bc355b132a0daaff3739e861e51fa6abd821e/obstore-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:948a1db1d34f88cfc7ab7e0cccdcfd84cf3977365634599c95ba03b4ef80d1c4", size = 3970041, upload-time = "2025-09-16T15:33:13.035Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/c4/018f90701f1e5ea3fbd57f61463f42e1ef5218e548d3adcf12b6be021c34/obstore-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2edaa97687c191c5324bb939d72f6fe86a7aa8191c410f1648c14e8296d05c1c", size = 3622568, upload-time = "2025-09-16T15:33:14.196Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/62/72dd1e7d52fc554bb1fdb1a9499bda219cf3facea5865a1d97fdc00b3a1b/obstore-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c4fb7ef8108f08d14edc8bec9e9a6a2e5c4d14eddb8819f5d0da498aff6e8888", size = 3356109, upload-time = "2025-09-16T15:33:15.315Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/ae/089fe5b9207091252fe5ce352551214f04560f85eb8f2cc4f716a6a1a57e/obstore-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fda8f658c0edf799ab1e264f9b12c7c184cd09a5272dc645d42e987810ff2772", size = 3454588, upload-time = "2025-09-16T15:33:16.421Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/10/1865ae2d1ba45e8ae85fb0c1aada2dc9533baf60c4dfe74dab905348d74a/obstore-0.8.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87fe2bc15ce4051ecb56abd484feca323c2416628beb62c1c7b6712114564d6e", size = 3688627, upload-time = "2025-09-16T15:33:17.604Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/09/5d7ba6d0aeac563ea5f5586401c677bace4f782af83522b1fdf15430e152/obstore-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2482aa2562ab6a4ca40250b26bea33f8375b59898a9b5615fd412cab81098123", size = 3959896, upload-time = "2025-09-16T15:33:18.789Z" },
+ { url = "https://files.pythonhosted.org/packages/16/15/2b3eda59914761a9ff4d840e2daec5697fd29b293bd18d3dc11c593aed06/obstore-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4153b928f5d2e9c6cb645e83668a53e0b42253d1e8bcb4e16571fc0a1434599a", size = 3933162, upload-time = "2025-09-16T15:33:19.935Z" },
+ { url = "https://files.pythonhosted.org/packages/14/7a/5fc63b41526587067537fb1498c59a210884664c65ccf0d1f8f823b0875a/obstore-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbfa9c38620cc191be98c8b5558c62071e495dc6b1cc724f38293ee439aa9f92", size = 3769605, upload-time = "2025-09-16T15:33:21.389Z" },
+ { url = "https://files.pythonhosted.org/packages/77/4e/2208ab6e1fc021bf8b7e117249a10ab75d0ed24e0f2de1a8d7cd67d885b5/obstore-0.8.2-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:0822836eae8d52499f10daef17f26855b4c123119c6eb984aa4f2d525ec2678d", size = 3534396, upload-time = "2025-09-16T15:33:22.574Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/8f/a0e2882edd6bd285c82b8a5851c4ecf386c93fe75b6e340d5d9d30e809fc/obstore-0.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8ef6435dfd586d83b4f778e7927a5d5b0d8b771e9ba914bc809a13d7805410e6", size = 3697777, upload-time = "2025-09-16T15:33:23.723Z" },
+ { url = "https://files.pythonhosted.org/packages/94/78/ebf0c33bed5c9a8eed3b00eefafbcc0a687eeb1e05451c76fcf199d29ff8/obstore-0.8.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:0f2cba91f4271ca95a932a51aa8dda1537160342b33f7836c75e1eb9d40621a2", size = 3681546, upload-time = "2025-09-16T15:33:24.935Z" },
+ { url = "https://files.pythonhosted.org/packages/af/21/9bf4fb9e53fd5f01af580b6538de2eae857e31d24b0ebfc4d916c306a1e4/obstore-0.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:23c876d603af0627627808d19a58d43eb5d8bfd02eecd29460bc9a58030fed55", size = 3765336, upload-time = "2025-09-16T15:33:26.069Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/3c/7f6895c23719482d231b2d6ed328e3223fdf99785f6850fba8d2fc5a86ee/obstore-0.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ff3c4b5d07629b70b9dee494cd6b94fff8465c3864752181a1cb81a77190fe42", size = 3941142, upload-time = "2025-09-16T15:33:27.275Z" },
+ { url = "https://files.pythonhosted.org/packages/93/a4/56ccdb756161595680a28f4b0def2c04f7048ffacf128029be8394367b26/obstore-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:aadb2cb72de7227d07f4570f82729625ffc77522fadca5cf13c3a37fbe8c8de9", size = 3970172, upload-time = "2025-09-16T15:33:28.393Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/dc/60fefbb5736e69eab56657bca04ca64dc07fdeccb3814164a31b62ad066b/obstore-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:bb70ce297a47392b1d9a3e310f18d59cd5ebbb9453428210fef02ed60e4d75d1", size = 3612955, upload-time = "2025-09-16T15:33:29.527Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/8b/844e8f382e5a12b8a3796a05d76a03e12c7aedc13d6900419e39207d7868/obstore-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1619bf618428abf1f607e0b219b2e230a966dcf697b717deccfa0983dd91f646", size = 3346564, upload-time = "2025-09-16T15:33:30.698Z" },
+ { url = "https://files.pythonhosted.org/packages/89/73/8537f99e09a38a54a6a15ede907aa25d4da089f767a808f0b2edd9c03cec/obstore-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a4605c3ed7c9515aeb4c619b5f7f2c9986ed4a79fe6045e536b5e59b804b1476", size = 3460809, upload-time = "2025-09-16T15:33:31.837Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/99/7714dec721e43f521d6325a82303a002cddad089437640f92542b84e9cc8/obstore-0.8.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce42670417876dd8668cbb8659e860e9725e5f26bbc86449fd259970e2dd9d18", size = 3692081, upload-time = "2025-09-16T15:33:33.028Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/bd/4ac4175fe95a24c220a96021c25c432bcc0c0212f618be0737184eebbaad/obstore-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a3e893b2a06585f651c541c1972fe1e3bf999ae2a5fda052ee55eb7e6516f5", size = 3957466, upload-time = "2025-09-16T15:33:34.528Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/04/caa288fb735484fc5cb019bdf3d896eaccfae0ac4622e520d05692c46790/obstore-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08462b32f95a9948ed56ed63e88406e2e5a4cae1fde198f9682e0fb8487100ed", size = 3951293, upload-time = "2025-09-16T15:33:35.733Z" },
+ { url = "https://files.pythonhosted.org/packages/44/2f/d380239da2d6a1fda82e17df5dae600a404e8a93a065784518ff8325d5f6/obstore-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a0bf7763292a8fc47d01cd66e6f19002c5c6ad4b3ed4e6b2729f5e190fa8a0d", size = 3766199, upload-time = "2025-09-16T15:33:36.904Z" },
+ { url = "https://files.pythonhosted.org/packages/28/41/d391be069d3da82969b54266948b2582aeca5dd735abeda4d63dba36e07b/obstore-0.8.2-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:bcd47f8126cb192cbe86942b8f73b1c45a651ce7e14c9a82c5641dfbf8be7603", size = 3529678, upload-time = "2025-09-16T15:33:38.221Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/4c/4862fdd1a3abde459ee8eea699b1797df638a460af235b18ca82c8fffb72/obstore-0.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57eda9fd8c757c3b4fe36cf3918d7e589cc1286591295cc10b34122fa36dd3fd", size = 3698079, upload-time = "2025-09-16T15:33:39.696Z" },
+ { url = "https://files.pythonhosted.org/packages/68/ca/014e747bc53b570059c27e3565b2316fbe5c107d4134551f4cd3e24aa667/obstore-0.8.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ea44442aad8992166baa69f5069750979e4c5d9ffce772e61565945eea5774b9", size = 3687154, upload-time = "2025-09-16T15:33:40.92Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/89/6db5f8edd93028e5b8bfbeee15e6bd3e56f72106107d31cb208b57659de4/obstore-0.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:41496a3ab8527402db4142aaaf0d42df9d7d354b13ba10d9c33e0e48dd49dd96", size = 3773444, upload-time = "2025-09-16T15:33:42.123Z" },
+ { url = "https://files.pythonhosted.org/packages/26/e5/c9e2cc540689c873beb61246e1615d6e38301e6a34dec424f5a5c63c1afd/obstore-0.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:43da209803f052df96c7c3cbec512d310982efd2407e4a435632841a51143170", size = 3939315, upload-time = "2025-09-16T15:33:43.252Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/c9/bb53280ca50103c1ffda373cdc9b0f835431060039c2897cbc87ddd92e42/obstore-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:1836f5dcd49f9f2950c75889ab5c51fb290d3ea93cdc39a514541e0be3af016e", size = 3978234, upload-time = "2025-09-16T15:33:44.393Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/5d/8c3316cc958d386d5e6ab03e9db9ddc27f8e2141cee4a6777ae5b92f3aac/obstore-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:212f033e53fe6e53d64957923c5c88949a400e9027f7038c705ec2e9038be563", size = 3612027, upload-time = "2025-09-16T15:33:45.6Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/4d/699359774ce6330130536d008bfc32827fab0c25a00238d015a5974a3d1d/obstore-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bee21fa4ba148d08fa90e47a96df11161661ed31e09c056a373cb2154b0f2852", size = 3344686, upload-time = "2025-09-16T15:33:47.185Z" },
+ { url = "https://files.pythonhosted.org/packages/82/37/55437341f10512906e02fd9fa69a8a95ad3f2f6a916d3233fda01763d110/obstore-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4c66594b59832ff1ced4c72575d9beb8b5f9b4e404ac1150a42bfb226617fd50", size = 3459860, upload-time = "2025-09-16T15:33:48.382Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/51/4245a616c94ee4851965e33f7a563ab4090cc81f52cc73227ff9ceca2e46/obstore-0.8.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:089f33af5c2fe132d00214a0c1f40601b28f23a38e24ef9f79fb0576f2730b74", size = 3691648, upload-time = "2025-09-16T15:33:49.524Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/f1/4e2fb24171e3ca3641a4653f006be826e7e17634b11688a5190553b00b83/obstore-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d87f658dfd340d5d9ea2d86a7c90d44da77a0db9e00c034367dca335735110cf", size = 3956867, upload-time = "2025-09-16T15:33:51.082Z" },
+ { url = "https://files.pythonhosted.org/packages/42/f5/b703115361c798c9c1744e1e700d5908d904a8c2e2bd38bec759c9ffb469/obstore-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e2e4fa92828c4fbc2d487f3da2d3588701a1b67d9f6ca3c97cc2afc912e9c63", size = 3950599, upload-time = "2025-09-16T15:33:52.173Z" },
+ { url = "https://files.pythonhosted.org/packages/53/20/08c6dc0f20c1394e2324b9344838e4e7af770cdcb52c30757a475f50daeb/obstore-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab440e89c5c37a8ec230857dd65147d4b923e0cada33297135d05e0f937d696a", size = 3765865, upload-time = "2025-09-16T15:33:53.291Z" },
+ { url = "https://files.pythonhosted.org/packages/77/20/77907765e29b2eba6bd8821872284d91170d7084f670855b2dfcb249ea14/obstore-0.8.2-cp313-cp313-manylinux_2_24_aarch64.whl", hash = "sha256:b9beed107c5c9cd995d4a73263861fcfbc414d58773ed65c14f80eb18258a932", size = 3529807, upload-time = "2025-09-16T15:33:54.535Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/f5/f629d39cc30d050f52b1bf927e4d65c1cc7d7ffbb8a635cd546b5c5219a0/obstore-0.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b75b4e7746292c785e31edcd5aadc8b758238372a19d4c5e394db5c305d7d175", size = 3693629, upload-time = "2025-09-16T15:33:56.016Z" },
+ { url = "https://files.pythonhosted.org/packages/30/ff/106763fd10f2a1cb47f2ef1162293c78ad52f4e73223d8d43fc6b755445d/obstore-0.8.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:f33e6c366869d05ab0b7f12efe63269e631c5450d95d6b4ba4c5faf63f69de70", size = 3686176, upload-time = "2025-09-16T15:33:57.247Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/0c/d2ccb6f32feeca906d5a7c4255340df5262af8838441ca06c9e4e37b67d5/obstore-0.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:12c885a9ce5ceb09d13cc186586c0c10b62597eff21b985f6ce8ff9dab963ad3", size = 3773081, upload-time = "2025-09-16T15:33:58.475Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/79/40d1cc504cefc89c9b3dd8874287f3fddc7d963a8748d6dffc5880222013/obstore-0.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4accc883b93349a81c9931e15dd318cc703b02bbef2805d964724c73d006d00e", size = 3938589, upload-time = "2025-09-16T15:33:59.734Z" },
+ { url = "https://files.pythonhosted.org/packages/14/dd/916c6777222db3271e9fb3cf9a97ed92b3a9b3e465bdeec96de9ab809d53/obstore-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:ec850adf9980e5788a826ccfd5819989724e2a2f712bfa3258e85966c8d9981e", size = 3977768, upload-time = "2025-09-16T15:34:01.25Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/37/14bae1f5bf4369027abc5315cdba2428ad4c16e2fd3bd5d35b7ee584aa0c/obstore-0.8.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6ea04118980a9c22fc8581225ff4507b6a161baf8949d728d96e68326ebaab59", size = 3624857, upload-time = "2025-09-16T15:34:35.601Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/c4/8cba91629aa20479ba86a57c2c2b3bc0a54fc6a31a4594014213603efae6/obstore-0.8.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5f33a7570b6001b54252260fbec18c3f6d21e25d3ec57e9b6c5e7330e8290eb2", size = 3355999, upload-time = "2025-09-16T15:34:36.954Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/10/3e40557d6d9c38c5a0f7bac1508209b9dbb8c4da918ddfa9326ba9a1de3f/obstore-0.8.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:11fa78dfb749edcf5a041cd6db20eae95b3e8b09dfdd9b38d14939da40e7c115", size = 3457322, upload-time = "2025-09-16T15:34:38.143Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/01/dcf7988350c286683698cbdd8c15498aec43cbca72eaabad06fd77f0f34a/obstore-0.8.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:872bc0921ff88305884546ba05e258ccd95672a03d77db123f0d0563fd3c000b", size = 3689452, upload-time = "2025-09-16T15:34:39.638Z" },
+ { url = "https://files.pythonhosted.org/packages/97/02/643eb2ede58933e47bdbc92786058c83d9aa569826d5bf6e83362d24a27a/obstore-0.8.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72556a2fbf018edd921286283e5c7eec9f69a21c6d12516d8a44108eceaa526a", size = 3961171, upload-time = "2025-09-16T15:34:41.232Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/5d/c0b515df6089d0f54109de8031a6f6ed31271361948bee90ab8271d22f79/obstore-0.8.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75fa1abf21499dfcfb0328941a175f89a9aa58245bf00e3318fe928e4b10d297", size = 3935988, upload-time = "2025-09-16T15:34:42.501Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/97/114d7bc172bb846472181d6fa3e950172ee1b1ccd11291777303c499dbdd/obstore-0.8.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f54f72f30cd608c4399679781c884bf8a0e816c1977a2fac993bf5e1fb30609f", size = 3771781, upload-time = "2025-09-16T15:34:44.405Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/43/4aa6de6dc406ef5e109b21a5614c34999575de638254deb456703fae24aa/obstore-0.8.2-pp310-pypy310_pp73-manylinux_2_24_aarch64.whl", hash = "sha256:b044ebf1bf7b8f7b0ca309375c1cd9e140be79e072ae8c70bbd5d9b2ad1f7678", size = 3536689, upload-time = "2025-09-16T15:34:45.649Z" },
+ { url = "https://files.pythonhosted.org/packages/06/a5/870ce541aa1a9ee1d9c3e99c2187049bf5a4d278ee9678cc449aae0a4e68/obstore-0.8.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:b1326cd2288b64d6fe8857cc22d3a8003b802585fc0741eff2640a8dc35e8449", size = 3700560, upload-time = "2025-09-16T15:34:47.252Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/93/76a5fc3833aaa833b4152950d9cdfd328493a48316c24e32ddefe9b8870f/obstore-0.8.2-pp310-pypy310_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:ba6863230648a9b0e11502d2745d881cf74262720238bc0093c3eabd22a3b24c", size = 3683450, upload-time = "2025-09-16T15:34:49.589Z" },
+ { url = "https://files.pythonhosted.org/packages/15/3c/4c389362c187630c42f61ef9214e67fc336e44b8aafc47cf49ba9ab8007d/obstore-0.8.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:887615da9eeefeb2df849d87c380e04877487aa29dbeb367efc3f17f667470d3", size = 3766628, upload-time = "2025-09-16T15:34:51.937Z" },
+ { url = "https://files.pythonhosted.org/packages/03/12/08547e63edf2239ec6660af434602208ab6f394955ef660a6edda13a0bee/obstore-0.8.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4eec1fb32ffa4fb9fe9ad584611ff031927a5c22732b56075ee7204f0e35ebdf", size = 3944069, upload-time = "2025-09-16T15:34:54.108Z" },
+]
+
[[package]]
name = "ocrmac"
version = "1.0.1"