From e7b077a901a991b21f367ef90d22d4cb01ae2c0c Mon Sep 17 00:00:00 2001 From: Dane Urban Date: Tue, 5 May 2026 15:56:12 -0700 Subject: [PATCH] feat(sessions): long-lived session pods with manual lifecycle Adds two new routes: * POST /v1/sessions creates a long-lived executor pod/container, optionally pre-staging files from /v1/files. * DELETE /v1/sessions/{id} tears the session down. Both Docker and Kubernetes backends are supported. Sessions are labeled ``app=code-interpreter,component=session`` so they can be enumerated for debugging, and named with a ``code-session-`` prefix so the delete path can sanity-check that callers aren't accidentally targeting other resources. Known limitation: this PR has no automatic cleanup. A session that the caller forgets to DELETE will live for the configured maximum lifetime (``SESSION_MAX_LIFETIME_SECONDS`` = 24h). The follow-up PR adds per-session TTL enforcement plus a reaper for crash recovery. Tests cover both backends and the route layer (success, 404 mapping, 501 mapping, file resolution, prefix-based safety check on delete). 
Co-Authored-By: Claude Opus 4.7 (1M context) --- code-interpreter/app/api/routes.py | 73 ++++++++- code-interpreter/app/models/schemas.py | 11 ++ .../app/services/executor_base.py | 30 ++++ .../app/services/executor_docker.py | 59 +++++++ .../app/services/executor_kubernetes.py | 54 +++++++ .../integration_tests/test_sessions_docker.py | 148 ++++++++++++++++++ .../test_sessions_kubernetes.py | 108 +++++++++++++ .../integration_tests/test_sessions_routes.py | 127 +++++++++++++++ 8 files changed, 606 insertions(+), 4 deletions(-) create mode 100644 code-interpreter/tests/integration_tests/test_sessions_docker.py create mode 100644 code-interpreter/tests/integration_tests/test_sessions_kubernetes.py create mode 100644 code-interpreter/tests/integration_tests/test_sessions_routes.py diff --git a/code-interpreter/app/api/routes.py b/code-interpreter/app/api/routes.py index 371ce23..f90f600 100644 --- a/code-interpreter/app/api/routes.py +++ b/code-interpreter/app/api/routes.py @@ -8,6 +8,9 @@ from app.app_configs import get_settings from app.models.schemas import ( + CreateSessionRequest, + CreateSessionResponse, + ExecuteFile, ExecuteRequest, ExecuteResponse, FileMetadataResponse, @@ -19,7 +22,7 @@ WorkspaceFile, ) from app.services.executor_base import EntryKind, StreamChunk, StreamResult, WorkspaceEntry -from app.services.executor_factory import execute_python, execute_python_streaming +from app.services.executor_factory import execute_python, execute_python_streaming, get_executor from app.services.file_storage import FileStorageService router = APIRouter() @@ -46,8 +49,8 @@ def _validate_timeout(req: ExecuteRequest) -> None: ) -def _stage_request_files( - req: ExecuteRequest, +def _resolve_uploaded_files( + files: list[ExecuteFile], storage: FileStorageService, ) -> tuple[list[tuple[str, bytes]], dict[str, bytes]]: """Resolve uploaded file IDs into content for the executor. 
@@ -56,7 +59,7 @@ def _stage_request_files( """ staged_files: list[tuple[str, bytes]] = [] input_files_map: dict[str, bytes] = {} - for file in req.files: + for file in files: try: content, _ = storage.get_file(file.file_id) except FileNotFoundError as exc: @@ -69,6 +72,14 @@ def _stage_request_files( return staged_files, input_files_map +def _stage_request_files( + req: ExecuteRequest, + storage: FileStorageService, +) -> tuple[list[tuple[str, bytes]], dict[str, bytes]]: + """Resolve uploaded file IDs into content for the executor.""" + return _resolve_uploaded_files(req.files, storage) + + def _save_workspace_files( entries: tuple[WorkspaceEntry, ...], input_files_map: dict[str, bytes], @@ -248,3 +259,57 @@ def delete_file(file_id: str) -> Response: ) return Response(status_code=status.HTTP_204_NO_CONTENT) + + +@router.post( + "/sessions", + response_model=CreateSessionResponse, + status_code=status.HTTP_201_CREATED, +) +def create_session(req: CreateSessionRequest) -> CreateSessionResponse: + """Create a long-lived code-executor pod. + + The session must be torn down explicitly via DELETE /v1/sessions/{id}. 
+ """ + settings = get_settings() + storage = get_file_storage() + staged_files, _ = _resolve_uploaded_files(req.files, storage) + + try: + info = get_executor().create_session( + files=staged_files, + cpu_time_limit_sec=settings.cpu_time_limit_sec, + memory_limit_mb=settings.memory_limit_mb, + ) + except NotImplementedError as exc: + raise HTTPException( + status_code=status.HTTP_501_NOT_IMPLEMENTED, + detail=str(exc), + ) from exc + except ValueError as exc: + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + detail=str(exc), + ) from exc + + return CreateSessionResponse(session_id=info.session_id) + + +@router.delete("/sessions/{session_id}", status_code=status.HTTP_204_NO_CONTENT) +def delete_session(session_id: str) -> Response: + """Tear down a session pod by ID.""" + try: + deleted = get_executor().delete_session(session_id) + except NotImplementedError as exc: + raise HTTPException( + status_code=status.HTTP_501_NOT_IMPLEMENTED, + detail=str(exc), + ) from exc + + if not deleted: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Session '{session_id}' not found", + ) + + return Response(status_code=status.HTTP_204_NO_CONTENT) diff --git a/code-interpreter/app/models/schemas.py b/code-interpreter/app/models/schemas.py index 5d85b19..53a82a6 100644 --- a/code-interpreter/app/models/schemas.py +++ b/code-interpreter/app/models/schemas.py @@ -120,3 +120,14 @@ class ListFilesResponse(BaseModel): class HealthResponse(BaseModel): status: Literal["ok", "error"] message: StrictStr | None = None + + +class CreateSessionRequest(BaseModel): + files: list[ExecuteFile] = Field( + default_factory=list, + description="Files to stage in the session workspace at create time.", + ) + + +class CreateSessionResponse(BaseModel): + session_id: StrictStr = Field(..., description="Identifier for the session pod/container.") diff --git a/code-interpreter/app/services/executor_base.py b/code-interpreter/app/services/executor_base.py 
index a0e8519..d9297f4 100644 --- a/code-interpreter/app/services/executor_base.py +++ b/code-interpreter/app/services/executor_base.py @@ -106,6 +106,18 @@ class HealthCheck: message: str | None = None +@dataclass(frozen=True, slots=True) +class SessionInfo: + """Identifying information for a long-lived session.""" + + session_id: str + + +SESSION_NAME_PREFIX = "code-session-" +SESSION_APP_LABEL = "code-interpreter" +SESSION_COMPONENT_LABEL = "session" + + class ExecutorProtocol(Protocol): def execute_python( self, @@ -168,6 +180,24 @@ def execute_python_streaming( """ raise NotImplementedError(f"{type(self).__name__} does not support streaming execution") + def create_session( + self, + *, + files: Sequence[tuple[str, bytes]] | None = None, + cpu_time_limit_sec: int | None = None, + memory_limit_mb: int | None = None, + ) -> SessionInfo: + """Create a long-lived execution environment. + + Returns identifying information for the session. The caller is + responsible for invoking ``delete_session`` when finished. + """ + raise NotImplementedError(f"{type(self).__name__} does not support sessions") + + def delete_session(self, session_id: str) -> bool: + """Tear down a session by ID. 
Returns True if found and deleted.""" + raise NotImplementedError(f"{type(self).__name__} does not support sessions") + @staticmethod def truncate_output(stream: bytes, max_bytes: int) -> str: if len(stream) <= max_bytes: diff --git a/code-interpreter/app/services/executor_docker.py b/code-interpreter/app/services/executor_docker.py index f4ada0c..967ed4a 100644 --- a/code-interpreter/app/services/executor_docker.py +++ b/code-interpreter/app/services/executor_docker.py @@ -21,10 +21,14 @@ PYTHON_EXECUTOR_DOCKER_RUN_ARGS, ) from app.services.executor_base import ( + SESSION_APP_LABEL, + SESSION_COMPONENT_LABEL, + SESSION_NAME_PREFIX, BaseExecutor, EntryKind, ExecutionResult, HealthCheck, + SessionInfo, StreamChunk, StreamEvent, StreamResult, @@ -34,6 +38,10 @@ logger = logging.getLogger(__name__) +# Sessions keep their idle container alive for at most this many seconds; a +# follow-up PR replaces this with a per-session TTL plus a reaper. +SESSION_MAX_LIFETIME_SECONDS = 24 * 60 * 60 + @dataclass class _ExecContext: @@ -394,6 +402,57 @@ def _run_in_container( finally: self._kill_container(container_name) + def create_session( + self, + *, + files: Sequence[tuple[str, bytes]] | None = None, + cpu_time_limit_sec: int | None = None, + memory_limit_mb: int | None = None, + ) -> SessionInfo: + container_name = f"{SESSION_NAME_PREFIX}{uuid.uuid4().hex}" + + cmd = self._build_run_command( + container_name=container_name, + cpu_time_limit_sec=cpu_time_limit_sec, + memory_limit_mb=memory_limit_mb, + sleep_seconds=SESSION_MAX_LIFETIME_SECONDS, + labels={ + "app": SESSION_APP_LABEL, + "component": SESSION_COMPONENT_LABEL, + }, + ) + start_proc = subprocess.run(cmd, capture_output=True, text=True) # nosec B603 + if start_proc.returncode != 0: + raise RuntimeError(f"Failed to start session container: {start_proc.stderr}") + + try: + if files: + tar_archive = self._create_tar_archive(files=files) + self._upload_tar_to_container(container_name, tar_archive) + except Exception: + 
self._kill_container(container_name) + raise + + logger.info("Created session container %s", container_name) + return SessionInfo(session_id=container_name) + + def delete_session(self, session_id: str) -> bool: + if not session_id.startswith(SESSION_NAME_PREFIX): + return False + result = subprocess.run( # nosec B603 + [self.docker_binary, "rm", "-f", session_id], + capture_output=True, + text=True, + ) + # `docker rm -f ` exits 0 on modern Docker, so check stderr + # for the "not found" message regardless of exit code. + stderr = (result.stderr or "").lower() + if "no such container" in stderr or "not found" in stderr: + return False + if result.returncode == 0: + return True + raise RuntimeError(f"Failed to delete session {session_id}: {result.stderr}") + def execute_python( self, *, diff --git a/code-interpreter/app/services/executor_kubernetes.py b/code-interpreter/app/services/executor_kubernetes.py index 96967af..27c05f1 100644 --- a/code-interpreter/app/services/executor_kubernetes.py +++ b/code-interpreter/app/services/executor_kubernetes.py @@ -28,10 +28,14 @@ KUBERNETES_EXECUTOR_SERVICE_ACCOUNT, ) from app.services.executor_base import ( + SESSION_APP_LABEL, + SESSION_COMPONENT_LABEL, + SESSION_NAME_PREFIX, BaseExecutor, EntryKind, ExecutionResult, HealthCheck, + SessionInfo, StreamChunk, StreamEvent, StreamResult, @@ -45,6 +49,10 @@ POD_DELETE_RETRY_DELAY_SECONDS = 0.2 POD_DELETE_CONFIRM_TIMEOUT_SECONDS = 2.0 +# Sessions keep their idle pod alive for at most this many seconds; a follow-up +# PR replaces this with a per-session TTL plus a reaper. 
+SESSION_MAX_LIFETIME_SECONDS = 24 * 60 * 60 + def _parse_exit_code(error: str) -> int | None: """Parse the exit code from a Kubernetes exec error channel message.""" @@ -596,6 +604,52 @@ def _cleanup_pod(self, pod_name: str) -> None: POD_DELETE_RETRIES, ) + def create_session( + self, + *, + files: Sequence[tuple[str, bytes]] | None = None, + cpu_time_limit_sec: int | None = None, + memory_limit_mb: int | None = None, + ) -> SessionInfo: + pod_name = f"{SESSION_NAME_PREFIX}{uuid.uuid4().hex}" + + manifest = self._create_pod_manifest( + pod_name=pod_name, + command=["sleep", str(SESSION_MAX_LIFETIME_SECONDS)], + labels={"app": SESSION_APP_LABEL, "component": SESSION_COMPONENT_LABEL}, + memory_limit_mb=memory_limit_mb, + cpu_time_limit_sec=cpu_time_limit_sec, + ) + + logger.info("Creating session pod %s in namespace %s", pod_name, self.namespace) + self.v1.create_namespaced_pod(namespace=self.namespace, body=manifest) + + try: + self._wait_for_pod_ready(pod_name) + if files: + tar_archive = self._create_tar_archive(files=files) + self._upload_tar_to_pod(pod_name, tar_archive) + except Exception: + self._cleanup_pod(pod_name) + raise + + return SessionInfo(session_id=pod_name) + + def delete_session(self, session_id: str) -> bool: + if not session_id.startswith(SESSION_NAME_PREFIX): + return False + try: + self.v1.delete_namespaced_pod( + name=session_id, + namespace=self.namespace, + body=client.V1DeleteOptions(grace_period_seconds=0), + ) + except ApiException as e: + if e.status == 404: + return False + raise + return True + def execute_python( self, *, diff --git a/code-interpreter/tests/integration_tests/test_sessions_docker.py b/code-interpreter/tests/integration_tests/test_sessions_docker.py new file mode 100644 index 0000000..a1a3a68 --- /dev/null +++ b/code-interpreter/tests/integration_tests/test_sessions_docker.py @@ -0,0 +1,148 @@ +"""Unit tests for DockerExecutor session methods. 
+ +Mocks subprocess so the session lifecycle can be exercised without a real +Docker daemon. +""" + +from __future__ import annotations + +import subprocess +from unittest.mock import MagicMock, patch + +import pytest + +from app.services.executor_base import ( + SESSION_APP_LABEL, + SESSION_COMPONENT_LABEL, + SESSION_NAME_PREFIX, +) +from app.services.executor_docker import DockerExecutor + + +@pytest.fixture() +def executor() -> DockerExecutor: + """Create a DockerExecutor bypassing __init__ (no docker binary needed).""" + inst = DockerExecutor.__new__(DockerExecutor) + inst.docker_binary = "/usr/bin/docker" + inst.image = "test:latest" + inst.run_args = "" + return inst + + +def _completed( + returncode: int, stdout: str = "", stderr: str = "" +) -> subprocess.CompletedProcess[str]: + """Build a CompletedProcess in text mode (subprocess calls use text=True).""" + return subprocess.CompletedProcess(args=[], returncode=returncode, stdout=stdout, stderr=stderr) + + +def _label_values(cmd: list[str]) -> list[str]: + return [cmd[i + 1] for i, arg in enumerate(cmd) if arg == "--label"] + + +# --------------------------------------------------------------------------- +# create_session +# --------------------------------------------------------------------------- + + +def test_create_session_returns_session_info(executor: DockerExecutor) -> None: + with patch("app.services.executor_docker.subprocess.run", return_value=_completed(0)): + info = executor.create_session() + + assert info.session_id.startswith(SESSION_NAME_PREFIX) + + +def test_create_session_runs_docker_with_session_labels(executor: DockerExecutor) -> None: + with patch("app.services.executor_docker.subprocess.run") as run: + run.return_value = _completed(0) + executor.create_session() + + cmd = run.call_args.args[0] + label_values = _label_values(cmd) + assert f"app={SESSION_APP_LABEL}" in label_values + assert f"component={SESSION_COMPONENT_LABEL}" in label_values + + +def 
test_create_session_stages_files(executor: DockerExecutor) -> None: + with ( + patch("app.services.executor_docker.subprocess.run", return_value=_completed(0)), + patch.object(executor, "_upload_tar_to_container") as upload, + ): + info = executor.create_session(files=[("data.txt", b"hello")]) + + upload.assert_called_once() + container_arg, tar_arg = upload.call_args.args + assert container_arg == info.session_id + assert isinstance(tar_arg, bytes) + assert len(tar_arg) > 0 + + +def test_create_session_skips_upload_when_no_files(executor: DockerExecutor) -> None: + with ( + patch("app.services.executor_docker.subprocess.run", return_value=_completed(0)), + patch.object(executor, "_upload_tar_to_container") as upload, + ): + executor.create_session() + + upload.assert_not_called() + + +def test_create_session_kills_container_on_staging_failure(executor: DockerExecutor) -> None: + with ( + patch("app.services.executor_docker.subprocess.run", return_value=_completed(0)), + patch.object(executor, "_upload_tar_to_container", side_effect=RuntimeError("boom")), + patch.object(executor, "_kill_container") as kill, + pytest.raises(RuntimeError, match="boom"), + ): + executor.create_session(files=[("data.txt", b"x")]) + + kill.assert_called_once() + + +def test_create_session_raises_when_docker_run_fails(executor: DockerExecutor) -> None: + with ( + patch( + "app.services.executor_docker.subprocess.run", + return_value=_completed(1, stderr="docker daemon down"), + ), + pytest.raises(RuntimeError, match="Failed to start session container"), + ): + executor.create_session() + + +# --------------------------------------------------------------------------- +# delete_session +# --------------------------------------------------------------------------- + + +def test_delete_session_returns_true_on_success(executor: DockerExecutor) -> None: + with patch("app.services.executor_docker.subprocess.run", return_value=_completed(0)): + assert 
executor.delete_session(f"{SESSION_NAME_PREFIX}abc") is True + + +def test_delete_session_returns_false_on_no_such_container(executor: DockerExecutor) -> None: + """Modern Docker exits 0 even when the container is missing — stderr is the signal.""" + with patch( + "app.services.executor_docker.subprocess.run", + return_value=_completed(0, stderr="Error: No such container: code-session-abc"), + ): + assert executor.delete_session(f"{SESSION_NAME_PREFIX}abc") is False + + +def test_delete_session_rejects_non_session_id(executor: DockerExecutor) -> None: + """Prefix check prevents accidentally deleting unrelated containers.""" + run_mock = MagicMock() + with patch("app.services.executor_docker.subprocess.run", run_mock): + assert executor.delete_session("random-name") is False + run_mock.assert_not_called() + + +def test_delete_session_raises_on_unexpected_failure(executor: DockerExecutor) -> None: + with ( + patch( + "app.services.executor_docker.subprocess.run", + return_value=_completed(1, stderr="some other failure"), + ), + pytest.raises(RuntimeError, match="Failed to delete session"), + ): + executor.delete_session(f"{SESSION_NAME_PREFIX}abc") diff --git a/code-interpreter/tests/integration_tests/test_sessions_kubernetes.py b/code-interpreter/tests/integration_tests/test_sessions_kubernetes.py new file mode 100644 index 0000000..3fada96 --- /dev/null +++ b/code-interpreter/tests/integration_tests/test_sessions_kubernetes.py @@ -0,0 +1,108 @@ +"""Unit tests for KubernetesExecutor session methods. + +Mocks the Kubernetes API so the session lifecycle can be exercised without +a real cluster. 
+""" + +from __future__ import annotations + +from unittest.mock import MagicMock, patch + +import pytest +from kubernetes.client.exceptions import ApiException # type: ignore[import-untyped] + +from app.services.executor_base import ( + SESSION_APP_LABEL, + SESSION_COMPONENT_LABEL, + SESSION_NAME_PREFIX, +) +from app.services.executor_kubernetes import KubernetesExecutor + + +@pytest.fixture() +def executor() -> KubernetesExecutor: + """Create a KubernetesExecutor bypassing __init__ (no cluster needed).""" + inst = KubernetesExecutor.__new__(KubernetesExecutor) + inst.v1 = MagicMock() + inst.namespace = "test" + inst.image = "test:latest" + inst.service_account = "" + pod_mock = MagicMock() + pod_mock.status.phase = "Running" + inst.v1.read_namespaced_pod.return_value = pod_mock + return inst + + +# --------------------------------------------------------------------------- +# create_session +# --------------------------------------------------------------------------- + + +def test_create_session_returns_session_info(executor: KubernetesExecutor) -> None: + info = executor.create_session() + assert info.session_id.startswith(SESSION_NAME_PREFIX) + + +def test_create_session_pod_carries_session_metadata(executor: KubernetesExecutor) -> None: + info = executor.create_session() + + pod = executor.v1.create_namespaced_pod.call_args.kwargs["body"] + assert pod.metadata.name == info.session_id + assert pod.metadata.labels["app"] == SESSION_APP_LABEL + assert pod.metadata.labels["component"] == SESSION_COMPONENT_LABEL + + +def test_create_session_stages_files(executor: KubernetesExecutor) -> None: + with patch.object(executor, "_upload_tar_to_pod") as upload: + info = executor.create_session(files=[("data.txt", b"hello")]) + + upload.assert_called_once() + pod_name_arg, tar_arg = upload.call_args.args + assert pod_name_arg == info.session_id + assert isinstance(tar_arg, bytes) + assert len(tar_arg) > 0 + + +def test_create_session_skips_upload_when_no_files(executor: 
KubernetesExecutor) -> None: + with patch.object(executor, "_upload_tar_to_pod") as upload: + executor.create_session() + + upload.assert_not_called() + + +def test_create_session_cleans_up_on_staging_failure(executor: KubernetesExecutor) -> None: + with ( + patch.object(executor, "_upload_tar_to_pod", side_effect=RuntimeError("boom")), + patch.object(executor, "_cleanup_pod") as cleanup, + pytest.raises(RuntimeError, match="boom"), + ): + executor.create_session(files=[("data.txt", b"x")]) + + cleanup.assert_called_once() + + +# --------------------------------------------------------------------------- +# delete_session +# --------------------------------------------------------------------------- + + +def test_delete_session_returns_true_on_success(executor: KubernetesExecutor) -> None: + assert executor.delete_session(f"{SESSION_NAME_PREFIX}abc") is True + executor.v1.delete_namespaced_pod.assert_called_once() + + +def test_delete_session_returns_false_on_404(executor: KubernetesExecutor) -> None: + executor.v1.delete_namespaced_pod.side_effect = ApiException(status=404) + assert executor.delete_session(f"{SESSION_NAME_PREFIX}abc") is False + + +def test_delete_session_rejects_non_session_id(executor: KubernetesExecutor) -> None: + """Prefix check prevents accidentally deleting unrelated pods.""" + assert executor.delete_session("code-exec-abc") is False + executor.v1.delete_namespaced_pod.assert_not_called() + + +def test_delete_session_propagates_other_api_errors(executor: KubernetesExecutor) -> None: + executor.v1.delete_namespaced_pod.side_effect = ApiException(status=500) + with pytest.raises(ApiException): + executor.delete_session(f"{SESSION_NAME_PREFIX}abc") diff --git a/code-interpreter/tests/integration_tests/test_sessions_routes.py b/code-interpreter/tests/integration_tests/test_sessions_routes.py new file mode 100644 index 0000000..e7bcd77 --- /dev/null +++ b/code-interpreter/tests/integration_tests/test_sessions_routes.py @@ -0,0 +1,127 @@ 
+"""Route-layer tests for /v1/sessions. + +Patches the executor so the routes can be exercised without a real Docker +daemon or Kubernetes cluster. +""" + +from __future__ import annotations + +from collections.abc import Generator +from unittest.mock import MagicMock, patch + +import pytest +from fastapi.testclient import TestClient + +from app.main import create_app +from app.services.executor_base import SessionInfo +from app.services.executor_factory import get_executor + + +@pytest.fixture(autouse=True) +def _clear_executor_cache() -> Generator[None, None, None]: + get_executor.cache_clear() + yield + get_executor.cache_clear() + + +def test_create_session_returns_session_id() -> None: + mock_executor = MagicMock() + mock_executor.create_session.return_value = SessionInfo(session_id="code-session-abc") + + with patch("app.api.routes.get_executor", return_value=mock_executor): + client = TestClient(create_app()) + response = client.post("/v1/sessions", json={}) + + assert response.status_code == 201 + body = response.json() + assert body["session_id"] == "code-session-abc" + + +def test_create_session_returns_404_for_unknown_file_id() -> None: + client = TestClient(create_app()) + response = client.post( + "/v1/sessions", + json={"files": [{"path": "data.txt", "file_id": "does-not-exist"}]}, + ) + assert response.status_code == 404 + assert "not found" in response.json()["detail"].lower() + + +def test_create_session_resolves_file_ids_into_content() -> None: + """Files referenced by file_id must be loaded and passed to the executor.""" + client = TestClient(create_app()) + upload_resp = client.post( + "/v1/files", + files={"file": ("data.txt", b"hello bytes", "application/octet-stream")}, + ) + file_id = upload_resp.json()["file_id"] + + mock_executor = MagicMock() + mock_executor.create_session.return_value = SessionInfo(session_id="code-session-x") + + with patch("app.api.routes.get_executor", return_value=mock_executor): + response = client.post( + 
"/v1/sessions", + json={"files": [{"path": "inputs/data.txt", "file_id": file_id}]}, + ) + + assert response.status_code == 201 + files_arg = mock_executor.create_session.call_args.kwargs["files"] + assert files_arg == [("inputs/data.txt", b"hello bytes")] + + +def test_create_session_returns_422_when_executor_raises_value_error() -> None: + mock_executor = MagicMock() + mock_executor.create_session.side_effect = ValueError("bad path") + + with patch("app.api.routes.get_executor", return_value=mock_executor): + client = TestClient(create_app()) + response = client.post("/v1/sessions", json={}) + + assert response.status_code == 422 + assert "bad path" in response.json()["detail"] + + +def test_create_session_returns_501_when_unsupported() -> None: + mock_executor = MagicMock() + mock_executor.create_session.side_effect = NotImplementedError("nope") + + with patch("app.api.routes.get_executor", return_value=mock_executor): + client = TestClient(create_app()) + response = client.post("/v1/sessions", json={}) + + assert response.status_code == 501 + + +def test_delete_session_returns_204_when_found() -> None: + mock_executor = MagicMock() + mock_executor.delete_session.return_value = True + + with patch("app.api.routes.get_executor", return_value=mock_executor): + client = TestClient(create_app()) + response = client.delete("/v1/sessions/code-session-abc") + + assert response.status_code == 204 + mock_executor.delete_session.assert_called_once_with("code-session-abc") + + +def test_delete_session_returns_404_when_unknown() -> None: + mock_executor = MagicMock() + mock_executor.delete_session.return_value = False + + with patch("app.api.routes.get_executor", return_value=mock_executor): + client = TestClient(create_app()) + response = client.delete("/v1/sessions/code-session-missing") + + assert response.status_code == 404 + + +def test_delete_session_returns_501_when_unsupported() -> None: + mock_executor = MagicMock() + mock_executor.delete_session.side_effect = 
NotImplementedError("nope") + + with patch("app.api.routes.get_executor", return_value=mock_executor): + client = TestClient(create_app()) + response = client.delete("/v1/sessions/code-session-abc") + + assert response.status_code == 501