diff --git a/alembic/versions/2026_04_29_0000-95c2670ff066_add_elog_last_entry.py b/alembic/versions/2026_04_29_0000-95c2670ff066_add_elog_last_entry.py
new file mode 100644
index 0000000..5d86170
--- /dev/null
+++ b/alembic/versions/2026_04_29_0000-95c2670ff066_add_elog_last_entry.py
@@ -0,0 +1,48 @@
+"""Add elog_last_entry table for follow-up parent pre-fill.
+
+Revision ID: 95c2670ff066
+Revises: 6c1c0c2f8a1b
+Create Date: 2026-04-29 00:00:00.000000
+"""
+from collections.abc import Sequence
+
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "95c2670ff066"
+down_revision: str | None = "6c1c0c2f8a1b"
+branch_labels: str | Sequence[str] | None = None
+depends_on: str | Sequence[str] | None = None
+
+
+def upgrade() -> None:
+    op.create_table(
+        "elog_last_entry",
+        sa.Column("id", sa.UUID(), nullable=False),
+        sa.Column("api_key_id", sa.UUID(), nullable=False),
+        sa.Column("logbooks_key", sa.String(length=1024), nullable=False),
+        sa.Column("entry_id", sa.String(length=255), nullable=False),
+        sa.Column(
+            "created_at",
+            sa.DateTime(timezone=True),
+            server_default=sa.text("now()"),
+            nullable=False,
+        ),
+        sa.Column(
+            "updated_at",
+            sa.DateTime(timezone=True),
+            server_default=sa.text("now()"),
+            nullable=False,
+        ),
+        sa.PrimaryKeyConstraint("id"),
+        sa.ForeignKeyConstraint(["api_key_id"], ["api_key.id"], ondelete="CASCADE"),
+        sa.UniqueConstraint("api_key_id", "logbooks_key", name="uq_elog_last_entry_scope"),
+    )
+    op.create_index("ix_elog_last_entry_api_key_id", "elog_last_entry", ["api_key_id"])
+
+
+def downgrade() -> None:
+    op.drop_index("ix_elog_last_entry_api_key_id", table_name="elog_last_entry")
+    op.drop_table("elog_last_entry")
diff --git a/app/api/v1/elog.py b/app/api/v1/elog.py
new file mode 100644
index 0000000..cf37c0a
--- /dev/null
+++ b/app/api/v1/elog.py
@@ -0,0 +1,220 @@
+"""HTTP routes for posting snapshots (and arbitrary entries) to an e-log.
+
+Returns 503 when no e-log provider is configured so the frontend can hide the
+"Post to elog" affordance.
+"""
+import logging
+from typing import Annotated
+from datetime import datetime
+
+import httpx
+from fastapi import Query, Depends, Security, APIRouter, HTTPException
+from pydantic import Field, BaseModel
+
+from app.config import Settings, get_settings
+from app.dependencies import DataBaseDep, get_api_key, require_read_access
+from app.services.elog import (
+    ElogTag,
+    ElogUser,
+    ElogAdapter,
+    ElogLogbook,
+    ElogEntryResult,
+    ElogEntryRequest,
+    get_elog_service,
+)
+from app.schemas.api_key import ApiKeyDTO
+from app.services.elog.last_entry import get_last_entry_id, upsert_last_entry
+
+logger = logging.getLogger(__name__)
+
+router = APIRouter(prefix="/elog", tags=["Elog"])
+
+
+# ---------------------------------------------------------------------------
+# Request/Response models specific to the HTTP surface
+# ---------------------------------------------------------------------------
+
+
+class ElogConfigDTO(BaseModel):
+    """Exposes whether e-log posting is available to the frontend."""
+
+    enabled: bool
+    provider: str = ""
+    defaultLogbooks: list[str] = []
+
+
+class CreateEntryRequestDTO(BaseModel):
+    """Posted by the frontend to create an e-log entry.
+
+    ``author`` is *not* trusted from the client — we stamp it from the API key.
+    """
+
+    logbooks: list[str] = Field(..., min_length=1)
+    title: str = Field(..., min_length=1, max_length=255)
+    bodyMarkdown: str
+    tags: list[str] = Field(default_factory=list)
+    snapshotId: str | None = None
+    followsUpEntryId: str | None = Field(
+        default=None,
+        description="If set, post as a follow-up of this entry id instead of a fresh entry.",
+    )
+    additionalAuthors: list[str] = Field(default_factory=list)
+    important: bool = False
+    eventAt: datetime | None = None
+
+
+class LastEntryResponseDTO(BaseModel):
+    """The most recent entry id this api key posted/followed-up for a logbook set."""
+
+    entryId: str | None = None
+
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
+
+def _require_adapter(adapter: ElogAdapter | None) -> ElogAdapter:
+    if adapter is None:
+        raise HTTPException(status_code=503, detail="E-log integration is not configured")
+    return adapter
+
+
+def _get_elog_adapter() -> ElogAdapter | None:
+    """FastAPI dependency wrapper; lets tests override via ``app.dependency_overrides``."""
+    return get_elog_service()
+
+
+async def _proxy_upstream(coro):
+    """Translate upstream HTTP errors into 502/504 responses."""
+    try:
+        return await coro
+    except httpx.HTTPStatusError as exc:
+        body_excerpt = (exc.response.text or "").strip()[:500]
+        logger.warning("E-log upstream error: %s %s", exc.response.status_code, body_excerpt)
+        detail = f"E-log upstream returned {exc.response.status_code}"
+        if body_excerpt:
+            detail = f"{detail}: {body_excerpt}"
+        raise HTTPException(status_code=502, detail=detail) from exc
+    except httpx.TimeoutException as exc:
+        logger.warning("E-log upstream timeout: %s", exc)
+        raise HTTPException(status_code=504, detail="E-log upstream timed out") from exc
+    except httpx.HTTPError as exc:
+        logger.warning("E-log upstream HTTP error: %s", exc)
+        raise HTTPException(status_code=502, detail="E-log
upstream unreachable") from exc
+
+
+# ---------------------------------------------------------------------------
+# Routes
+# ---------------------------------------------------------------------------
+
+
+@router.get("/config")
+async def get_elog_config(
+    settings: Annotated[Settings, Depends(get_settings)],
+    adapter: Annotated[ElogAdapter | None, Depends(_get_elog_adapter)],
+) -> ElogConfigDTO:
+    """Feature-flag endpoint. Always returns 200 — no auth required."""
+    provider = (settings.elog_provider or "").strip()
+    return ElogConfigDTO(
+        enabled=adapter is not None,
+        provider=provider if adapter is not None else "",
+        defaultLogbooks=list(settings.elog_default_logbooks) if adapter is not None else [],
+    )
+
+
+@router.get(
+    "/logbooks",
+    dependencies=[Security(require_read_access)],
+)
+async def list_logbooks(
+    adapter: Annotated[ElogAdapter | None, Depends(_get_elog_adapter)],
+) -> list[ElogLogbook]:
+    return await _proxy_upstream(_require_adapter(adapter).list_logbooks())
+
+
+@router.get(
+    "/users",
+    dependencies=[Security(require_read_access)],
+)
+async def search_users(
+    adapter: Annotated[ElogAdapter | None, Depends(_get_elog_adapter)],
+    search: Annotated[str, Query(min_length=1, max_length=100)],
+    limit: Annotated[int, Query(ge=1, le=100)] = 20,
+) -> list[ElogUser]:
+    """Search for users/people by name or email via the e-log provider."""
+    svc = _require_adapter(adapter)
+    try:
+        return await _proxy_upstream(svc.search_users(search, limit=limit))
+    except NotImplementedError as exc:
+        raise HTTPException(status_code=501, detail=str(exc) or "User search unsupported") from exc
+
+
+@router.get(
+    "/logbooks/{logbook_id}/tags",
+    dependencies=[Security(require_read_access)],
+)
+async def list_tags(
+    logbook_id: str,
+    adapter: Annotated[ElogAdapter | None, Depends(_get_elog_adapter)],
+) -> list[ElogTag]:
+    return await _proxy_upstream(_require_adapter(adapter).list_tags(logbook_id))
+
+
+@router.get(
+    "/last-entry",
+    dependencies=[Security(require_read_access)],
+)
+async def get_last_entry(
+    api_key: Annotated[ApiKeyDTO, Security(get_api_key)],
+    db: DataBaseDep,
+    logbook: Annotated[list[str], Query(min_length=1)],
+) -> LastEntryResponseDTO:
+    """Return the last entry id this api key posted/followed-up for the given logbook set."""
+    entry_id = await get_last_entry_id(db, api_key_id=api_key.id, logbooks=list(logbook))
+    return LastEntryResponseDTO(entryId=entry_id)
+
+
+@router.post("/entries")
+async def create_entry(
+    payload: CreateEntryRequestDTO,
+    api_key: Annotated[ApiKeyDTO, Security(get_api_key)],
+    adapter: Annotated[ElogAdapter | None, Depends(_get_elog_adapter)],
+    db: DataBaseDep,
+) -> ElogEntryResult:
+    """Create an e-log entry, or a follow-up if ``followsUpEntryId`` is set. Requires write access."""
+    if not api_key.writeAccess:
+        raise HTTPException(status_code=403, detail="API key does not have write access")
+    adapter = _require_adapter(adapter)
+
+    request = ElogEntryRequest(
+        logbooks=payload.logbooks,
+        title=payload.title,
+        body_markdown=payload.bodyMarkdown,
+        tags=payload.tags,
+        author=api_key.appName,
+        snapshot_id=payload.snapshotId,
+        additional_authors=payload.additionalAuthors,
+        important=payload.important,
+        event_at=payload.eventAt,
+    )
+
+    try:
+        if payload.followsUpEntryId:
+            result = await _proxy_upstream(adapter.create_follow_up(payload.followsUpEntryId, request))
+        else:
+            result = await _proxy_upstream(adapter.create_entry(request))
+    except NotImplementedError as exc:
+        raise HTTPException(status_code=501, detail=str(exc) or "Follow-up unsupported") from exc
+
+    try:
+        await upsert_last_entry(
+            db,
+            api_key_id=api_key.id,
+            logbooks=payload.logbooks,
+            entry_id=result.id,
+        )
+    except Exception:
+        logger.exception("Failed to record last elog entry id; continuing")
+
+    return result
diff --git a/app/api/v1/router.py b/app/api/v1/router.py
index 5ad1d76..9379dfd 100644
--- a/app/api/v1/router.py
+++ b/app/api/v1/router.py
@@ -1,6 +1,7 @@
from fastapi import APIRouter
 
 from app.api.v1.pvs import router as pvs_router
+from app.api.v1.elog import router as elog_router
 from app.api.v1.jobs import router as jobs_router
 from app.api.v1.tags import router as tags_router
 from app.api.v1.health import router as health_router
@@ -16,4 +17,5 @@
 router.include_router(snapshots_router)
 router.include_router(jobs_router)
 router.include_router(websocket_router)
+router.include_router(elog_router)
 router.include_router(health_router)
diff --git a/app/config.py b/app/config.py
index e919e74..9c095ff 100644
--- a/app/config.py
+++ b/app/config.py
@@ -51,6 +51,16 @@ class Settings(BaseSettings):
     # Performance
     bulk_insert_batch_size: int = 5000
 
+    # E-log integration (plugin-based)
+    elog_provider: str = ""  # Key into app.services.elog.ELOG_PROVIDERS; empty = disabled
+    elog_proxy_url: str = ""  # Outbound proxy for e-log calls (control-room gateways)
+    elog_default_logbooks: list[str] = []  # Pre-selected logbook IDs in the post dialog
+
+    # elog-plus adapter
+    elog_plus_base_url: str = ""
+    elog_plus_token: str = ""  # Service JWT / application token
+    elog_plus_auth_header: str = "x-vouch-idp-accesstoken"
+
     class Config:
         env_file = ".env"
         env_prefix = "SQUIRREL_"
diff --git a/app/main.py b/app/main.py
index e306e5f..77e6bee 100644
--- a/app/main.py
+++ b/app/main.py
@@ -21,6 +21,7 @@
 from app.config import get_settings
 from app.api.v1.router import router as v1_router
+from app.services.elog import shutdown_elog_service
 from app.api.v1.websocket import get_diff_manager
 from app.services.epics_service import get_epics_service
 from app.services.redis_service import get_redis_service
@@ -110,6 +111,12 @@ async def lifespan(app: FastAPI):
     await epics.shutdown()
     logger.info("EPICS service shut down")
 
+    # Close e-log adapter HTTP client, if initialized
+    try:
+        await shutdown_elog_service()
+    except Exception as e:
+        logger.error(f"Error shutting down e-log adapter: {e}")
+
     logger.info("Squirrel API shutdown complete")
diff
--git a/app/models/__init__.py b/app/models/__init__.py
index 267eefa..3fd1c75 100644
--- a/app/models/__init__.py
+++ b/app/models/__init__.py
@@ -4,6 +4,7 @@
 from app.models.pv import PV, pv_tag
 from app.models.snapshot import Snapshot, SnapshotValue
 from app.models.job import Job, JobStatus, JobType
+from app.models.elog_last_entry import ElogLastEntry
 
 __all__ = [
     "Base",
@@ -17,4 +18,5 @@
     "Job",
     "JobStatus",
     "JobType",
+    "ElogLastEntry",
 ]
diff --git a/app/models/elog_last_entry.py b/app/models/elog_last_entry.py
new file mode 100644
index 0000000..20e5154
--- /dev/null
+++ b/app/models/elog_last_entry.py
@@ -0,0 +1,23 @@
+"""Tracks the most recent elog entry id per (api_key, logbook set).
+
+The PostToElogDialog uses this to pre-fill the parent entry id when an
+operator wants to follow up on their previous post (typical workflow:
+morning snapshot then hourly follow-ups for the rest of the day).
+"""
+from sqlalchemy import Index, String, ForeignKey, UniqueConstraint
+from sqlalchemy.orm import Mapped, mapped_column
+
+from app.models.base import Base, UUIDMixin, TimestampMixin
+
+
+class ElogLastEntry(Base, UUIDMixin, TimestampMixin):
+    __tablename__ = "elog_last_entry"
+
+    api_key_id: Mapped[str] = mapped_column(ForeignKey("api_key.id", ondelete="CASCADE"), nullable=False)
+    logbooks_key: Mapped[str] = mapped_column(String(1024), nullable=False)
+    entry_id: Mapped[str] = mapped_column(String(255), nullable=False)
+
+    __table_args__ = (
+        UniqueConstraint("api_key_id", "logbooks_key", name="uq_elog_last_entry_scope"),
+        Index("ix_elog_last_entry_api_key_id", "api_key_id"),
+    )
diff --git a/app/services/elog/__init__.py b/app/services/elog/__init__.py
new file mode 100644
index 0000000..6b9c478
--- /dev/null
+++ b/app/services/elog/__init__.py
@@ -0,0 +1,85 @@
+"""E-log plugin registry.
+
+``get_elog_service`` returns the configured adapter or ``None`` when e-log
+integration is disabled (``SQUIRREL_ELOG_PROVIDER`` unset/empty).
+"""
+from __future__ import annotations
+
+import logging
+from typing import Callable
+
+from app.config import Settings, get_settings
+from app.services.elog.base import (
+    ElogTag,
+    ElogUser,
+    ElogAdapter,
+    ElogLogbook,
+    ElogEntryRequest,
+    ElogEntryResult,
+)
+from app.services.elog.elog_plus import ElogPlusAdapter
+
+logger = logging.getLogger(__name__)
+
+
+def _build_elog_plus(settings: Settings) -> ElogAdapter:
+    return ElogPlusAdapter(
+        base_url=settings.elog_plus_base_url,
+        token=settings.elog_plus_token,
+        auth_header=settings.elog_plus_auth_header,
+        proxy_url=settings.elog_proxy_url or None,
+    )
+
+
+ELOG_PROVIDERS: dict[str, Callable[[Settings], ElogAdapter]] = {
+    "elog_plus": _build_elog_plus,
+}
+
+
+_adapter: ElogAdapter | None = None
+_adapter_provider: str | None = None
+
+
+def get_elog_service() -> ElogAdapter | None:
+    """Return the configured adapter singleton, or ``None`` when disabled."""
+    global _adapter, _adapter_provider
+
+    settings = get_settings()
+    provider = (settings.elog_provider or "").strip()
+
+    if not provider:
+        return None
+
+    if _adapter is not None and _adapter_provider == provider:
+        return _adapter
+
+    factory = ELOG_PROVIDERS.get(provider)
+    if factory is None:
+        logger.warning("Unknown SQUIRREL_ELOG_PROVIDER=%r; e-log disabled.", provider)
+        return None
+
+    _adapter = factory(settings)
+    _adapter_provider = provider
+    logger.info("E-log adapter initialized: %s", provider)
+    return _adapter
+
+
+async def shutdown_elog_service() -> None:
+    """Close the cached adapter, if any. Called from the app lifespan."""
+    global _adapter, _adapter_provider
+    if _adapter is not None:
+        await _adapter.close()
+        _adapter = None
+        _adapter_provider = None
+
+
+__all__ = [
+    "ELOG_PROVIDERS",
+    "ElogAdapter",
+    "ElogEntryRequest",
+    "ElogEntryResult",
+    "ElogLogbook",
+    "ElogTag",
+    "get_elog_service",
+    "shutdown_elog_service",
+]
diff --git a/app/services/elog/base.py b/app/services/elog/base.py
new file mode 100644
index 0000000..c5ace25
--- /dev/null
+++ b/app/services/elog/base.py
@@ -0,0 +1,97 @@
+"""Plugin contract for posting entries to an electronic logbook.
+
+Labs running Squirrel with a different e-log system implement :class:`ElogAdapter`,
+register the subclass in ``app.services.elog.ELOG_PROVIDERS``, and set
+``SQUIRREL_ELOG_PROVIDER`` to the registered key.
+"""
+from abc import ABC, abstractmethod
+from datetime import datetime
+
+from pydantic import Field, BaseModel
+
+
+class ElogLogbook(BaseModel):
+    """A logbook the user can post to."""
+
+    id: str
+    name: str
+
+
+class ElogTag(BaseModel):
+    """A tag attachable to an entry within a logbook."""
+
+    id: str
+    name: str
+
+
+class ElogUser(BaseModel):
+    """A user/person from the e-log backend (typically backed by LDAP)."""
+
+    uid: str
+    commonName: str
+    surname: str
+    gecos: str
+    mail: str
+
+
+class ElogEntryRequest(BaseModel):
+    """Input for :meth:`ElogAdapter.create_entry`."""
+
+    logbooks: list[str] = Field(..., min_length=1, description="Logbook IDs to post into.")
+    title: str = Field(..., min_length=1, max_length=255)
+    body_markdown: str = Field(..., description="Entry body authored by the user; markdown.")
+    tags: list[str] = Field(default_factory=list, description="Tag IDs to attach.")
+    author: str = Field(..., description="Human-readable attribution (typically API key's appName).")
+    snapshot_id: str | None = Field(default=None, description="Optional source snapshot for auditing.")
+    additional_authors: list[str] = Field(
+        default_factory=list,
+        description="Extra
authors to credit on the entry (e.g. emails).",
+    )
+    important: bool = Field(default=False, description="Mark the entry as important.")
+    event_at: datetime | None = Field(
+        default=None,
+        description="When the event occurred. Adapters should serialize without offset if needed.",
+    )
+
+
+class ElogEntryResult(BaseModel):
+    """Returned from :meth:`ElogAdapter.create_entry`."""
+
+    id: str
+    url: str | None = None
+
+
+class ElogAdapter(ABC):
+    """Abstract base class for e-log backend plugins."""
+
+    @abstractmethod
+    async def list_logbooks(self) -> list[ElogLogbook]:
+        ...
+
+    @abstractmethod
+    async def list_tags(self, logbook_id: str) -> list[ElogTag]:
+        ...
+
+    @abstractmethod
+    async def create_entry(self, request: ElogEntryRequest) -> ElogEntryResult:
+        ...
+
+    async def search_users(self, search: str, limit: int = 20) -> list[ElogUser]:
+        """Search for users/people by name or email.
+
+        Default raises NotImplementedError — adapters that support user
+        lookup override this.
+        """
+        raise NotImplementedError("This e-log provider does not support user search")
+
+    async def create_follow_up(self, parent_entry_id: str, request: ElogEntryRequest) -> ElogEntryResult:
+        """Post ``request`` as a follow-up of ``parent_entry_id``.
+
+        Adapters whose backend has no follow-up concept can leave this default
+        in place; the HTTP layer translates the error into 501.
+        """
+        raise NotImplementedError("This e-log provider does not support follow-ups")
+
+    async def close(self) -> None:
+        """Release resources (HTTP clients, pools). Called on app shutdown."""
+        return None
diff --git a/app/services/elog/elog_plus.py b/app/services/elog/elog_plus.py
new file mode 100644
index 0000000..32134db
--- /dev/null
+++ b/app/services/elog/elog_plus.py
@@ -0,0 +1,197 @@
+"""Adapter for the elog-plus backend (https://github.com/slaclab/elog-plus)."""
+import logging
+from typing import Any
+
+import httpx
+import markdown as markdown_lib
+
+from app.services.elog.base import (
+    ElogTag,
+    ElogAdapter,
+    ElogLogbook,
+    ElogEntryResult,
+    ElogEntryRequest,
+)
+
+logger = logging.getLogger(__name__)
+
+
+# elog-plus stores the entry text as-is and parses it as HTML (jsoup) to
+# extract image attachments; it does not run a markdown processor. Render on
+# our side so the entry displays formatted instead of raw markdown.
+_MARKDOWN_EXTENSIONS = ["extra", "sane_lists", "nl2br"]
+
+
+def _render_markdown(md: str) -> str:
+    return markdown_lib.markdown(md, extensions=_MARKDOWN_EXTENSIONS, output_format="html")
+
+
+def _unwrap(body: Any) -> Any:
+    """Strip elog-plus' ``ApiResultResponse`` envelope when present."""
+    if isinstance(body, dict) and "payload" in body:
+        return body["payload"]
+    return body
+
+
+class ElogPlusAdapter(ElogAdapter):
+    """Calls the elog-plus REST API over HTTP."""
+
+    def __init__(
+        self,
+        *,
+        base_url: str,
+        token: str,
+        auth_header: str = "x-vouch-idp-accesstoken",
+        proxy_url: str | None = None,
+        timeout: float = 15.0,
+    ):
+        if not base_url:
+            raise ValueError("elog_plus_base_url is required")
+        if not token:
+            raise ValueError("elog_plus_token is required")
+
+        self._auth_header = auth_header
+        self._client = httpx.AsyncClient(
+            base_url=base_url.rstrip("/"),
+            headers={auth_header: token},
+            timeout=timeout,
+            proxy=proxy_url or None,
+            trust_env=False,
+        )
+
+    async def list_logbooks(self) -> list[ElogLogbook]:
+        resp = await self._client.get("/v1/logbooks")
+        resp.raise_for_status()
+        items = _unwrap(resp.json()) or []
+        return [ElogLogbook(id=item["id"],
name=item["name"]) for item in items]
+
+    async def _resolve_logbook_ids(self, logbooks: list[str]) -> list[str]:
+        """Resolve any logbook names in ``logbooks`` to their IDs.
+
+        v1's ``POST /v1/entries`` and ``POST /v1/entries/{id}/follow-ups`` both
+        validate logbooks as IDs (auth checks, ``canCreateNewEntry`` /
+        ``canCreateNewFollowUp``). Items already matching a known ID pass
+        through unchanged.
+        """
+        all_logbooks = await self.list_logbooks()
+        by_id = {lb.id: lb.id for lb in all_logbooks}
+        by_name = {lb.name.lower(): lb.id for lb in all_logbooks}
+        resolved: list[str] = []
+        for entry in logbooks:
+            if entry in by_id:
+                resolved.append(entry)
+            else:
+                resolved.append(by_name.get(entry.lower(), entry))
+        return resolved
+
+    async def list_tags(self, logbook_id: str) -> list[ElogTag]:
+        resp = await self._client.get(f"/v1/logbooks/{logbook_id}/tags")
+        resp.raise_for_status()
+        items = _unwrap(resp.json()) or []
+        return [ElogTag(id=item["id"], name=item["name"]) for item in items]
+
+    async def _resolve_tag_ids(self, tags: list[str], logbook_ids: list[str]) -> list[str]:
+        """Resolve tag names to IDs across the given logbooks.
+
+        v1 entries validate ``newEntry.tags`` as IDs
+        (``LogbookService::tagIdExistInAnyLogbookIds``). Items already matching
+        a known ID pass through unchanged.
+        """
+        if not tags or not logbook_ids:
+            return list(tags)
+        by_id: dict[str, str] = {}
+        by_name: dict[str, str] = {}
+        for lb_id in logbook_ids:
+            try:
+                logbook_tags = await self.list_tags(lb_id)
+            except httpx.HTTPError:
+                continue
+            for t in logbook_tags:
+                by_id[t.id] = t.id
+                by_name.setdefault(t.name.lower(), t.id)
+        resolved: list[str] = []
+        for entry in tags:
+            if entry in by_id:
+                resolved.append(entry)
+            else:
+                resolved.append(by_name.get(entry.lower(), entry))
+        return resolved
+
+    def _build_entry_dto(self, request: ElogEntryRequest) -> dict[str, Any]:
+        body_md = f"_Posted by **{request.author}** via Squirrel_\n\n{request.body_markdown}"
+        dto: dict[str, Any] = {
+            "logbooks": request.logbooks,
+            "title": request.title,
+            "text": _render_markdown(body_md),
+            "tags": request.tags,
+        }
+        # elog-plus rejects the whole post if any entry in additionalAuthors is
+        # not a valid LDAP email (EntryService.java:374). We forward only the
+        # user-selected emails — the API key's app name is already credited in
+        # the body attribution above.
+        if request.additional_authors:
+            dto["additionalAuthors"] = list(request.additional_authors)
+        if request.important:
+            dto["important"] = True
+        if request.event_at is not None:
+            # elog-plus expects LocalDateTime (no offset)
+            dto["eventAt"] = request.event_at.replace(tzinfo=None).isoformat(timespec="seconds")
+        return dto
+
+    @staticmethod
+    def _extract_id(body: Any) -> str:
+        entry_id = _unwrap(body)
+        if isinstance(entry_id, dict):
+            entry_id = entry_id.get("id") or entry_id.get("entryId") or ""
+        return str(entry_id)
+
+    async def search_users(self, search: str, limit: int = 20) -> list:
+        from app.services.elog.base import ElogUser
+
+        resp = await self._client.get(
+            "/v1/users",
+            params={"search": search, "limit": limit},
+        )
+        resp.raise_for_status()
+        items = _unwrap(resp.json()) or []
+        return [
+            ElogUser(
+                uid=item.get("uid", item.get("id", "")),
+                commonName=item.get("commonName", item.get("name", "")),
+                surname=item.get("surname", ""),
+                gecos=item.get("gecos", ""),
+                mail=item.get("mail", item.get("email", "")),
+            )
+            for item in items
+        ]
+
+    async def _build_resolved_dto(self, request: ElogEntryRequest) -> dict[str, Any]:
+        """Resolve logbook/tag names to IDs and build the v1 JSON payload.
+
+        We use v1 endpoints for both fresh creates and follow-ups because v2's
+        ``NewEntryDTO`` is missing ``additionalAuthors`` (silently dropped via
+        ``@JsonIgnoreProperties(ignoreUnknown = true)``). v1 takes IDs.
+        """
+        resolved_logbooks = await self._resolve_logbook_ids(request.logbooks)
+        resolved_tags = await self._resolve_tag_ids(request.tags, resolved_logbooks)
+        return self._build_entry_dto(
+            request.model_copy(update={"logbooks": resolved_logbooks, "tags": resolved_tags})
+        )
+
+    async def create_entry(self, request: ElogEntryRequest) -> ElogEntryResult:
+        dto = await self._build_resolved_dto(request)
+        resp = await self._client.post("/v1/entries", json=dto)
+        resp.raise_for_status()
+        return ElogEntryResult(id=self._extract_id(resp.json()))
+
+    async def create_follow_up(self, parent_entry_id: str, request: ElogEntryRequest) -> ElogEntryResult:
+        dto = await self._build_resolved_dto(request)
+        resp = await self._client.post(
+            f"/v1/entries/{parent_entry_id}/follow-ups",
+            json=dto,
+        )
+        resp.raise_for_status()
+        return ElogEntryResult(id=self._extract_id(resp.json()))
+
+    async def close(self) -> None:
+        await self._client.aclose()
diff --git a/app/services/elog/last_entry.py b/app/services/elog/last_entry.py
new file mode 100644
index 0000000..cd4a561
--- /dev/null
+++ b/app/services/elog/last_entry.py
@@ -0,0 +1,53 @@
+"""Per-(api_key, logbook set) last-entry tracking for the elog plugin.
+
+Used by the PostToElogDialog to pre-fill the "Follow up previous post" field.
+The same row advances on both fresh creates and follow-ups, so the chain
+naturally walks forward as the operator posts hourly snapshots.
+"""
+import hashlib
+
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.dialects.postgresql import insert
+
+from app.models.elog_last_entry import ElogLastEntry
+
+_MAX_KEY_LEN = 1024
+
+
+def compute_logbooks_key(logbooks: list[str]) -> str:
+    """Deterministically derive the scope key from a logbook list.
+
+    Trims, drops empties, dedupes, sorts, joins with ``\\n``. Falls back to a
+    SHA-256 prefix when the joined string would exceed the column width.
+    """
+    cleaned = sorted({lb.strip() for lb in logbooks if lb and lb.strip()})
+    if not cleaned:
+        raise ValueError("at least one logbook required")
+    key = "\n".join(cleaned)
+    if len(key) > _MAX_KEY_LEN:
+        return "sha256:" + hashlib.sha256(key.encode()).hexdigest()
+    return key
+
+
+async def get_last_entry_id(db: AsyncSession, *, api_key_id: str, logbooks: list[str]) -> str | None:
+    key = compute_logbooks_key(logbooks)
+    stmt = select(ElogLastEntry.entry_id).where(
+        ElogLastEntry.api_key_id == api_key_id,
+        ElogLastEntry.logbooks_key == key,
+    )
+    return (await db.execute(stmt)).scalar_one_or_none()
+
+
+async def upsert_last_entry(db: AsyncSession, *, api_key_id: str, logbooks: list[str], entry_id: str) -> None:
+    key = compute_logbooks_key(logbooks)
+    stmt = (
+        insert(ElogLastEntry)
+        .values(api_key_id=api_key_id, logbooks_key=key, entry_id=entry_id)
+        .on_conflict_do_update(
+            index_elements=["api_key_id", "logbooks_key"],
+            set_={"entry_id": entry_id},
+        )
+    )
+    await db.execute(stmt)
+    await db.commit()
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 5f8dec6..57b70e2 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -53,6 +53,10 @@ services:
       EPICS_PVA_ADDR_LIST: "${EPICS_PVA_ADDR_LIST}"
       EPICS_PVA_SERVER_PORT: "${EPICS_PVA_SERVER_PORT}"
       EPICS_PVA_BROADCAST_PORT: "${EPICS_PVA_BROADCAST_PORT}"
+
+      SQUIRREL_ELOG_PROVIDER: "${SQUIRREL_ELOG_PROVIDER}"
+      SQUIRREL_ELOG_PLUS_BASE_URL: "${SQUIRREL_ELOG_PLUS_BASE_URL}"
+      SQUIRREL_ELOG_PLUS_TOKEN: "${SQUIRREL_ELOG_PLUS_TOKEN}"
     ports:
       - "${API_HOST_PORT:-8080}:8000"
     volumes:
diff --git a/docs/development/elog-plugin.md b/docs/development/elog-plugin.md
new file mode 100644
index 0000000..d600023
--- /dev/null
+++ b/docs/development/elog-plugin.md
@@ -0,0 +1,158 @@
+# E-log Plugin Guide
+
+Squirrel posts snapshots to an electronic logbook through a pluggable adapter
+so different labs can use different e-log systems.
The in-tree adapter targets +[elog-plus](https://github.com/slaclab/elog-plus); writing your own is a matter +of subclassing [`ElogAdapter`][app.services.elog.base.ElogAdapter] and +registering it. + +## Enabling the shipped elog-plus adapter + +Set these environment variables (via `.env` or the shell): + +| Variable | Required | Default | Description | +|----------|----------|---------|-------------| +| `SQUIRREL_ELOG_PROVIDER` | yes | `""` (disabled) | `elog_plus` to enable the shipped adapter. | +| `SQUIRREL_ELOG_PLUS_BASE_URL` | yes | — | e.g. `https://elog.lab.org` | +| `SQUIRREL_ELOG_PLUS_TOKEN` | yes | — | Application JWT minted via elog-plus' `POST /v1/applications`. | +| `SQUIRREL_ELOG_PLUS_AUTH_HEADER` | no | `x-vouch-idp-accesstoken` | Matches `ELOG_PLUS_AUTH_HEADER` on the elog-plus service. | +| `SQUIRREL_ELOG_DEFAULT_LOGBOOKS` | no | `[]` | JSON list of logbook IDs preselected in the post dialog. | +| `SQUIRREL_ELOG_PROXY_URL` | no | `""` | Outbound HTTP(S) proxy for e-log calls (control-room gateways). | + +When `SQUIRREL_ELOG_PROVIDER` is empty or unknown, `GET /v1/elog/config` +returns `{enabled: false}` and the frontend hides the "Post to Elog" button. + +## Writing a new adapter + +An adapter is a class that implements three async methods. All inputs and +outputs are plain Pydantic models — you don't need to touch FastAPI. + +### 1. 
Subclass `ElogAdapter` + +Create a file under `app/services/elog/`, for example +`app/services/elog/mylab_elog.py`: + +```python +import httpx + +from app.services.elog.base import ( + ElogAdapter, + ElogEntryRequest, + ElogEntryResult, + ElogLogbook, + ElogTag, +) + + +class MyLabElogAdapter(ElogAdapter): + def __init__(self, base_url: str, token: str, proxy_url: str | None = None): + if not base_url or not token: + raise ValueError("base_url and token are required") + self._client = httpx.AsyncClient( + base_url=base_url.rstrip("/"), + headers={"Authorization": f"Bearer {token}"}, + timeout=15.0, + proxy=proxy_url or None, + trust_env=False, + ) + + async def list_logbooks(self) -> list[ElogLogbook]: + resp = await self._client.get("/logbooks") + resp.raise_for_status() + return [ElogLogbook(id=x["id"], name=x["name"]) for x in resp.json()] + + async def list_tags(self, logbook_id: str) -> list[ElogTag]: + resp = await self._client.get(f"/logbooks/{logbook_id}/tags") + resp.raise_for_status() + return [ElogTag(id=x["id"], name=x["name"]) for x in resp.json()] + + async def create_entry(self, request: ElogEntryRequest) -> ElogEntryResult: + body = f"Posted by {request.author} via Squirrel\n\n{request.body_markdown}" + resp = await self._client.post( + "/entries", + json={ + "logbooks": request.logbooks, + "title": request.title, + "body": body, + "tags": request.tags, + }, + ) + resp.raise_for_status() + return ElogEntryResult(id=resp.json()["id"]) + + async def close(self) -> None: + await self._client.aclose() +``` + +### 2. 
Register it + +Add a factory to `app/services/elog/__init__.py`: + +```python +from app.services.elog.mylab_elog import MyLabElogAdapter + + +def _build_mylab(settings: Settings) -> ElogAdapter: + return MyLabElogAdapter( + base_url=settings.mylab_elog_base_url, + token=settings.mylab_elog_token, + proxy_url=settings.elog_proxy_url or None, + ) + + +ELOG_PROVIDERS: dict[str, Callable[[Settings], ElogAdapter]] = { + "elog_plus": _build_elog_plus, + "mylab": _build_mylab, +} +``` + +### 3. Add settings + +In `app/config.py`: + +```python +mylab_elog_base_url: str = "" +mylab_elog_token: str = "" +``` + +### 4. Turn it on + +``` +SQUIRREL_ELOG_PROVIDER=mylab +SQUIRREL_MYLAB_ELOG_BASE_URL=https://elog.mylab.org +SQUIRREL_MYLAB_ELOG_TOKEN=... +``` + +## Contract + +`ElogAdapter` methods receive and return: + +- `ElogLogbook(id, name)` — a target the user can post into. IDs must round-trip + unchanged through `list_tags` and `create_entry`. +- `ElogTag(id, name)` — a tag attached to an entry within a logbook. +- `ElogEntryRequest` — what the frontend sends. Key fields: + - `logbooks: list[str]` — at least one logbook ID. + - `title: str` (≤ 255 chars) + - `body_markdown: str` — user-edited markdown. Convert to HTML here if your + e-log renders raw. + - `tags: list[str]` — tag IDs. + - `author: str` — stamped by the route from the API key's `appName`; not + trusted from the client. + - `snapshot_id: str | None` — for your own auditing if useful. +- `ElogEntryResult(id, url=None)` — returned to the frontend after posting. + +## Error handling + +Raise `httpx.HTTPStatusError` / `httpx.TimeoutException` for upstream failures. +The `/v1/elog` router translates these into `502` / `504` so the frontend can +surface a useful error. Validation errors on input fail before reaching the +adapter thanks to Pydantic. 
+ +## Testing + +Implementations should follow +[`tests/test_services/test_elog_plus.py`](../../tests/test_services/test_elog_plus.py): +construct the adapter against an `httpx.MockTransport` so tests stay +hermetic. Route-level tests can use +[`tests/mocks/elog_mock.py::MockElogAdapter`](../../tests/mocks/elog_mock.py) +by overriding the `_get_elog_adapter` dependency. diff --git a/docs/development/index.md b/docs/development/index.md index d3b76ad..cea6c35 100644 --- a/docs/development/index.md +++ b/docs/development/index.md @@ -247,3 +247,4 @@ pre-commit run --all-files - [Testing](testing.md) - Running and writing tests - [Database Migrations](migrations.md) - Managing schema changes - [Code Quality](code-quality.md) - Linting and formatting +- [E-log Plugin](elog-plugin.md) - Writing an adapter for a different e-log backend diff --git a/mkdocs.yml b/mkdocs.yml index 96a10c3..eb5960c 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -114,6 +114,7 @@ nav: - Testing: development/testing.md - Database Migrations: development/migrations.md - Code Quality: development/code-quality.md + - E-log Plugin: development/elog-plugin.md - API Reference: - api-reference/index.md - REST Endpoints: api-reference/endpoints.md diff --git a/pyproject.toml b/pyproject.toml index eafddd8..0c1a918 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ dependencies = [ "arq>=0.26.0", "aiobreaker>=1.2.0", "greenlet>=3.3.0", + "markdown>=3.5", ] [project.optional-dependencies] diff --git a/tests/mocks/elog_mock.py b/tests/mocks/elog_mock.py new file mode 100644 index 0000000..dd5d4a0 --- /dev/null +++ b/tests/mocks/elog_mock.py @@ -0,0 +1,63 @@ +"""In-memory :class:`ElogAdapter` used by API tests.""" +from app.services.elog.base import ( + ElogTag, + ElogUser, + ElogAdapter, + ElogLogbook, + ElogEntryResult, + ElogEntryRequest, +) + +ELOG_DEFAULT_USERS = [ + ElogUser(uid="jdoe", commonName="Jane Doe", surname="Doe", gecos="Jane Doe", mail="jdoe@slac.stanford.edu"), + ElogUser( + 
class MockElogAdapter(ElogAdapter):
    """In-memory :class:`ElogAdapter` that records everything it is asked to do.

    Tests inspect ``created_entries`` and ``follow_ups`` to assert what the
    routes forwarded; ``supports_follow_up`` toggles the provider-unsupported
    (HTTP 501) path. Entry ids are minted sequentially as ``entry-1``,
    ``entry-2``, ... so tests can predict them.
    """

    def __init__(
        self,
        logbooks: list[ElogLogbook] | None = None,
        tags_by_logbook: dict[str, list[ElogTag]] | None = None,
        supports_follow_up: bool = True,
        users: list[ElogUser] | None = None,
    ):
        # Fall back to the module-level defaults when the test gives nothing.
        self._logbooks = logbooks or [ELOG_DEFAULT_LOGBOOK]
        self._tags_by_logbook = tags_by_logbook or {ELOG_DEFAULT_LOGBOOK.id: [ELOG_DEFAULT_TAG]}
        self.supports_follow_up = supports_follow_up
        # Recorded interactions, inspected by route tests.
        self.created_entries: list[ElogEntryRequest] = []
        self.follow_ups: list[tuple[str, ElogEntryRequest]] = []
        # Copy the default roster so per-test mutation can't leak across tests.
        self._users = list(ELOG_DEFAULT_USERS) if users is None else users
        self._next_id = 1

    def _mint_id(self) -> str:
        # Sequential, predictable ids ("entry-1", "entry-2", ...).
        minted = f"entry-{self._next_id}"
        self._next_id += 1
        return minted

    async def list_logbooks(self) -> list[ElogLogbook]:
        # Return a fresh list so callers can't mutate our state.
        return [*self._logbooks]

    async def list_tags(self, logbook_id: str) -> list[ElogTag]:
        # Unknown logbooks yield an empty tag list rather than raising.
        return [*self._tags_by_logbook.get(logbook_id, [])]

    async def create_entry(self, request: ElogEntryRequest) -> ElogEntryResult:
        # Record the request verbatim and hand back the next sequential id.
        self.created_entries.append(request)
        return ElogEntryResult(id=self._mint_id())

    async def search_users(self, search: str, limit: int = 20) -> list[ElogUser]:
        """Case-insensitive substring match over full name (gecos) and email."""
        needle = search.lower()

        def matches(user: ElogUser) -> bool:
            return needle in user.gecos.lower() or needle in user.mail.lower()

        return list(filter(matches, self._users))[:limit]

    async def create_follow_up(self, parent_entry_id: str, request: ElogEntryRequest) -> ElogEntryResult:
        # Mirrors a provider without follow-up support when disabled.
        if not self.supports_follow_up:
            raise NotImplementedError("Mock adapter has follow-up disabled")
        self.follow_ups.append((parent_entry_id, request))
        return ElogEntryResult(id=self._mint_id())
index 0000000..1cc15b4 --- /dev/null +++ b/tests/test_api/test_elog.py @@ -0,0 +1,254 @@ +"""Route tests for /v1/elog/*. + +Uses a local ``client`` fixture that skips the Postgres setup in the root +conftest — the e-log routes do not touch the DB directly; auth is stubbed. +""" +from datetime import datetime + +import pytest_asyncio +from httpx import AsyncClient, ASGITransport + +from app.main import app +from app.db.session import get_db +from app.api.v1.elog import _get_elog_adapter +from app.dependencies import get_api_key +from app.schemas.api_key import ApiKeyDTO +from tests.mocks.elog_mock import ( + ELOG_DEFAULT_TAG, + ELOG_DEFAULT_LOGBOOK, + MockElogAdapter, +) + + +@pytest_asyncio.fixture +async def client() -> AsyncClient: # overrides the root conftest fixture for this module + async def override_get_db(): + yield None # Not used by /v1/elog/* routes + + async def override_get_api_key() -> ApiKeyDTO: + return ApiKeyDTO( + id="test-key-id", + appName="TestClient", + isActive=True, + readAccess=True, + writeAccess=True, + createdAt=datetime.now(), + updatedAt=datetime.now(), + ) + + app.dependency_overrides[get_db] = override_get_db + app.dependency_overrides[get_api_key] = override_get_api_key + + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as c: + yield c + + app.dependency_overrides.pop(get_db, None) + app.dependency_overrides.pop(get_api_key, None) + + +@pytest_asyncio.fixture +async def mock_elog(): + """Override the e-log dependency with a MockElogAdapter.""" + adapter = MockElogAdapter() + app.dependency_overrides[_get_elog_adapter] = lambda: adapter + yield adapter + app.dependency_overrides.pop(_get_elog_adapter, None) + + +@pytest_asyncio.fixture +async def disable_elog(): + """Override the e-log dependency to return ``None`` (disabled).""" + app.dependency_overrides[_get_elog_adapter] = lambda: None + yield + app.dependency_overrides.pop(_get_elog_adapter, None) + + +class TestElogConfig: + async def 
test_config_disabled_by_default(self, client: AsyncClient, disable_elog): + resp = await client.get("/v1/elog/config") + assert resp.status_code == 200 + body = resp.json() + assert body["enabled"] is False + assert body["provider"] == "" + + async def test_config_enabled_when_adapter_configured(self, client: AsyncClient, mock_elog): + # Monkey-patch get_elog_service to match adapter install + resp = await client.get("/v1/elog/config") + assert resp.status_code == 200 + body = resp.json() + assert body["enabled"] is True + + +class TestElogLogbooksAndTags: + async def test_returns_503_when_disabled(self, client: AsyncClient, disable_elog): + resp = await client.get("/v1/elog/logbooks") + assert resp.status_code == 503 + + async def test_list_logbooks(self, client: AsyncClient, mock_elog): + resp = await client.get("/v1/elog/logbooks") + assert resp.status_code == 200 + body = resp.json() + assert body == [{"id": ELOG_DEFAULT_LOGBOOK.id, "name": ELOG_DEFAULT_LOGBOOK.name}] + + async def test_list_tags_for_logbook(self, client: AsyncClient, mock_elog): + resp = await client.get(f"/v1/elog/logbooks/{ELOG_DEFAULT_LOGBOOK.id}/tags") + assert resp.status_code == 200 + body = resp.json() + assert body == [{"id": ELOG_DEFAULT_TAG.id, "name": ELOG_DEFAULT_TAG.name}] + + async def test_list_tags_unknown_logbook_returns_empty(self, client: AsyncClient, mock_elog): + resp = await client.get("/v1/elog/logbooks/does-not-exist/tags") + assert resp.status_code == 200 + assert resp.json() == [] + + +class TestCreateEntry: + async def test_returns_503_when_disabled(self, client: AsyncClient, disable_elog): + resp = await client.post( + "/v1/elog/entries", + json={ + "logbooks": ["logbook-1"], + "title": "Test", + "bodyMarkdown": "body", + }, + ) + assert resp.status_code == 503 + + async def test_create_entry(self, client: AsyncClient, mock_elog): + resp = await client.post( + "/v1/elog/entries", + json={ + "logbooks": [ELOG_DEFAULT_LOGBOOK.id], + "title": "Snapshot: test", + 
"bodyMarkdown": "# hello", + "tags": [ELOG_DEFAULT_TAG.id], + "snapshotId": "abc-123", + }, + ) + assert resp.status_code == 200 + body = resp.json() + assert body["id"] == "entry-1" + assert len(mock_elog.created_entries) == 1 + + stored = mock_elog.created_entries[0] + assert stored.title == "Snapshot: test" + assert stored.body_markdown == "# hello" + assert stored.logbooks == [ELOG_DEFAULT_LOGBOOK.id] + assert stored.tags == [ELOG_DEFAULT_TAG.id] + assert stored.snapshot_id == "abc-123" + # Author is stamped from the API key, not trusted from the client payload. + assert stored.author == "TestClient" + + async def test_client_cannot_override_author(self, client: AsyncClient, mock_elog): + # Extra "author" field is ignored by the DTO. + resp = await client.post( + "/v1/elog/entries", + json={ + "logbooks": [ELOG_DEFAULT_LOGBOOK.id], + "title": "Title", + "bodyMarkdown": "body", + "author": "hacker", + }, + ) + assert resp.status_code == 200 + assert mock_elog.created_entries[-1].author == "TestClient" + + async def test_validation_requires_at_least_one_logbook(self, client: AsyncClient, mock_elog): + resp = await client.post( + "/v1/elog/entries", + json={"logbooks": [], "title": "T", "bodyMarkdown": "body"}, + ) + assert resp.status_code == 422 + + async def test_validation_rejects_empty_title(self, client: AsyncClient, mock_elog): + resp = await client.post( + "/v1/elog/entries", + json={"logbooks": ["logbook-1"], "title": "", "bodyMarkdown": "body"}, + ) + assert resp.status_code == 422 + + async def test_extra_fields_passed_through(self, client: AsyncClient, mock_elog): + resp = await client.post( + "/v1/elog/entries", + json={ + "logbooks": [ELOG_DEFAULT_LOGBOOK.id], + "title": "T", + "bodyMarkdown": "b", + "additionalAuthors": ["alice@lab", "bob@lab"], + "important": True, + "eventAt": "2026-04-29T09:30:15", + }, + ) + assert resp.status_code == 200 + stored = mock_elog.created_entries[-1] + assert stored.additional_authors == ["alice@lab", "bob@lab"] + 
assert stored.important is True + assert stored.event_at == datetime(2026, 4, 29, 9, 30, 15) + + +class TestFollowUp: + async def test_routes_to_create_follow_up_when_id_provided(self, client: AsyncClient, mock_elog): + resp = await client.post( + "/v1/elog/entries", + json={ + "logbooks": [ELOG_DEFAULT_LOGBOOK.id], + "title": "Hourly", + "bodyMarkdown": "body", + "followsUpEntryId": "parent-1", + }, + ) + assert resp.status_code == 200 + assert mock_elog.created_entries == [] + assert len(mock_elog.follow_ups) == 1 + parent_id, request = mock_elog.follow_ups[0] + assert parent_id == "parent-1" + assert request.title == "Hourly" + assert request.author == "TestClient" + + async def test_returns_501_when_provider_unsupported(self, client: AsyncClient): + adapter = MockElogAdapter(supports_follow_up=False) + app.dependency_overrides[_get_elog_adapter] = lambda: adapter + try: + resp = await client.post( + "/v1/elog/entries", + json={ + "logbooks": [ELOG_DEFAULT_LOGBOOK.id], + "title": "T", + "bodyMarkdown": "b", + "followsUpEntryId": "parent-1", + }, + ) + assert resp.status_code == 501 + finally: + app.dependency_overrides.pop(_get_elog_adapter, None) + + +class TestSearchUsers: + async def test_returns_503_when_disabled(self, client: AsyncClient, disable_elog): + resp = await client.get("/v1/elog/users", params={"search": "jane"}) + assert resp.status_code == 503 + + async def test_search_by_name(self, client: AsyncClient, mock_elog): + resp = await client.get("/v1/elog/users", params={"search": "jane"}) + assert resp.status_code == 200 + body = resp.json() + assert len(body) == 1 + assert body[0]["uid"] == "jdoe" + assert body[0]["gecos"] == "Jane Doe" + assert body[0]["mail"] == "jdoe@slac.stanford.edu" + + async def test_search_by_email(self, client: AsyncClient, mock_elog): + resp = await client.get("/v1/elog/users", params={"search": "jsmith@"}) + assert resp.status_code == 200 + body = resp.json() + assert len(body) == 1 + assert body[0]["uid"] == "jsmith" + 
+ async def test_search_no_match(self, client: AsyncClient, mock_elog): + resp = await client.get("/v1/elog/users", params={"search": "nobody"}) + assert resp.status_code == 200 + assert resp.json() == [] + + async def test_search_requires_query(self, client: AsyncClient, mock_elog): + resp = await client.get("/v1/elog/users") + assert resp.status_code == 422 diff --git a/tests/test_api/test_elog_last_entry.py b/tests/test_api/test_elog_last_entry.py new file mode 100644 index 0000000..1ef984c --- /dev/null +++ b/tests/test_api/test_elog_last_entry.py @@ -0,0 +1,191 @@ +"""DB-backed tests for the elog last-entry tracking. + +Uses the root conftest's Postgres fixtures because the GET /last-entry route +and the upsert in POST /entries both touch the database. An ``ApiKey`` row is +inserted to satisfy the FK from ``elog_last_entry``. +""" +from uuid import uuid4 +from datetime import datetime + +import pytest_asyncio +from httpx import AsyncClient, ASGITransport +from sqlalchemy.ext.asyncio import AsyncSession + +from app.main import app +from app.db.session import get_db +from app.api.v1.elog import _get_elog_adapter +from app.dependencies import get_api_key +from app.models.api_key import ApiKey +from app.schemas.api_key import ApiKeyDTO +from tests.mocks.elog_mock import ELOG_DEFAULT_LOGBOOK, MockElogAdapter + +PRIMARY_KEY_ID = str(uuid4()) +SECONDARY_KEY_ID = str(uuid4()) + + +async def _insert_api_key(db: AsyncSession, *, key_id: str, app_name: str) -> None: + db.add( + ApiKey( + id=key_id, + app_name=app_name, + token_hash=f"hash-{key_id}", + is_active=True, + read_access=True, + write_access=True, + ) + ) + await db.commit() + + +@pytest_asyncio.fixture +async def primary_client(db_session: AsyncSession) -> AsyncClient: + """Client that authenticates as PRIMARY_KEY_ID.""" + await _insert_api_key(db_session, key_id=PRIMARY_KEY_ID, app_name="PrimaryClient") + + async def override_get_db(): + yield db_session + + async def override_get_api_key() -> ApiKeyDTO: + 
return ApiKeyDTO( + id=PRIMARY_KEY_ID, + appName="PrimaryClient", + isActive=True, + readAccess=True, + writeAccess=True, + createdAt=datetime.now(), + updatedAt=datetime.now(), + ) + + app.dependency_overrides[get_db] = override_get_db + app.dependency_overrides[get_api_key] = override_get_api_key + + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as c: + yield c + + app.dependency_overrides.pop(get_db, None) + app.dependency_overrides.pop(get_api_key, None) + + +@pytest_asyncio.fixture +async def mock_elog(): + adapter = MockElogAdapter() + app.dependency_overrides[_get_elog_adapter] = lambda: adapter + yield adapter + app.dependency_overrides.pop(_get_elog_adapter, None) + + +class TestGetLastEntry: + async def test_returns_null_when_unknown(self, primary_client: AsyncClient, mock_elog): + resp = await primary_client.get("/v1/elog/last-entry", params={"logbook": ["logbook-1"]}) + assert resp.status_code == 200 + assert resp.json() == {"entryId": None} + + async def test_records_after_create_and_returns_same_id(self, primary_client: AsyncClient, mock_elog): + post_resp = await primary_client.post( + "/v1/elog/entries", + json={ + "logbooks": [ELOG_DEFAULT_LOGBOOK.id], + "title": "Morning", + "bodyMarkdown": "body", + }, + ) + assert post_resp.status_code == 200 + entry_id = post_resp.json()["id"] + + get_resp = await primary_client.get("/v1/elog/last-entry", params={"logbook": [ELOG_DEFAULT_LOGBOOK.id]}) + assert get_resp.status_code == 200 + assert get_resp.json() == {"entryId": entry_id} + + async def test_logbook_query_param_is_order_independent(self, primary_client: AsyncClient, mock_elog): + post_resp = await primary_client.post( + "/v1/elog/entries", + json={ + "logbooks": ["lb-A", "lb-B"], + "title": "T", + "bodyMarkdown": "b", + }, + ) + assert post_resp.status_code == 200 + entry_id = post_resp.json()["id"] + + # Reverse order on lookup must hit the same scope key. 
+ get_resp = await primary_client.get("/v1/elog/last-entry", params={"logbook": ["lb-B", "lb-A"]}) + assert get_resp.json() == {"entryId": entry_id} + + async def test_chain_advances_on_follow_up(self, primary_client: AsyncClient, mock_elog): + first = await primary_client.post( + "/v1/elog/entries", + json={ + "logbooks": [ELOG_DEFAULT_LOGBOOK.id], + "title": "Morning", + "bodyMarkdown": "b", + }, + ) + first_id = first.json()["id"] + + second = await primary_client.post( + "/v1/elog/entries", + json={ + "logbooks": [ELOG_DEFAULT_LOGBOOK.id], + "title": "Hourly 1", + "bodyMarkdown": "b", + "followsUpEntryId": first_id, + }, + ) + second_id = second.json()["id"] + assert second_id != first_id + + get_resp = await primary_client.get("/v1/elog/last-entry", params={"logbook": [ELOG_DEFAULT_LOGBOOK.id]}) + assert get_resp.json() == {"entryId": second_id} + + async def test_scoped_per_logbook_set(self, primary_client: AsyncClient, mock_elog): + ops_post = await primary_client.post( + "/v1/elog/entries", + json={"logbooks": ["ops"], "title": "T", "bodyMarkdown": "b"}, + ) + comm_post = await primary_client.post( + "/v1/elog/entries", + json={"logbooks": ["commissioning"], "title": "T", "bodyMarkdown": "b"}, + ) + + ops_get = await primary_client.get("/v1/elog/last-entry", params={"logbook": ["ops"]}) + comm_get = await primary_client.get("/v1/elog/last-entry", params={"logbook": ["commissioning"]}) + assert ops_get.json() == {"entryId": ops_post.json()["id"]} + assert comm_get.json() == {"entryId": comm_post.json()["id"]} + + async def test_scoped_per_api_key(self, primary_client: AsyncClient, mock_elog, db_session: AsyncSession): + # Primary client posts. + post_resp = await primary_client.post( + "/v1/elog/entries", + json={ + "logbooks": [ELOG_DEFAULT_LOGBOOK.id], + "title": "T", + "bodyMarkdown": "b", + }, + ) + assert post_resp.status_code == 200 + + # Switch auth override to a second key — should see no last entry. 
+ await _insert_api_key(db_session, key_id=SECONDARY_KEY_ID, app_name="SecondaryClient") + + async def override_get_api_key() -> ApiKeyDTO: + return ApiKeyDTO( + id=SECONDARY_KEY_ID, + appName="SecondaryClient", + isActive=True, + readAccess=True, + writeAccess=True, + createdAt=datetime.now(), + updatedAt=datetime.now(), + ) + + app.dependency_overrides[get_api_key] = override_get_api_key + try: + get_resp = await primary_client.get( + "/v1/elog/last-entry", + params={"logbook": [ELOG_DEFAULT_LOGBOOK.id]}, + ) + assert get_resp.json() == {"entryId": None} + finally: + # Other tests may rely on the primary override; restore via fixture teardown. + pass diff --git a/tests/test_services/test_elog_plus.py b/tests/test_services/test_elog_plus.py new file mode 100644 index 0000000..8e0fe34 --- /dev/null +++ b/tests/test_services/test_elog_plus.py @@ -0,0 +1,401 @@ +"""Unit tests for :class:`ElogPlusAdapter` against a fake transport.""" +import json +from datetime import datetime + +import httpx +import pytest + +from app.services.elog.base import ElogEntryRequest +from app.services.elog.elog_plus import ElogPlusAdapter + + +def _make_adapter(handler, **overrides) -> ElogPlusAdapter: + """Build an adapter whose HTTP client routes requests through ``handler``.""" + adapter = ElogPlusAdapter( + base_url="http://elog.test", + token="test-token", + **overrides, + ) + adapter._client = httpx.AsyncClient( + base_url="http://elog.test", + headers={adapter._auth_header: "test-token"}, + transport=httpx.MockTransport(handler), + trust_env=False, + ) + return adapter + + +class TestListLogbooks: + async def test_parses_envelope(self): + def handler(request: httpx.Request) -> httpx.Response: + assert request.url.path == "/v1/logbooks" + assert request.headers["x-vouch-idp-accesstoken"] == "test-token" + return httpx.Response( + 200, + json={ + "errorCode": 0, + "payload": [ + {"id": "lb1", "name": "Ops", "tags": []}, + {"id": "lb2", "name": "Commissioning", "tags": []}, + ], + }, + 
) + + adapter = _make_adapter(handler) + try: + logbooks = await adapter.list_logbooks() + finally: + await adapter.close() + + assert [lb.id for lb in logbooks] == ["lb1", "lb2"] + assert [lb.name for lb in logbooks] == ["Ops", "Commissioning"] + + async def test_tolerates_missing_envelope(self): + def handler(request: httpx.Request) -> httpx.Response: + return httpx.Response(200, json=[{"id": "lb1", "name": "Ops"}]) + + adapter = _make_adapter(handler) + try: + logbooks = await adapter.list_logbooks() + finally: + await adapter.close() + + assert len(logbooks) == 1 + assert logbooks[0].id == "lb1" + + +def _logbooks_directory_response() -> httpx.Response: + """elog-plus' /v1/logbooks listing used by adapter id↔name resolvers.""" + return httpx.Response( + 200, + json={ + "errorCode": 0, + "payload": [ + {"id": "lb1", "name": "Operations"}, + {"id": "lb2", "name": "Commissioning"}, + ], + }, + ) + + +def _tags_directory_response_for_lb1() -> httpx.Response: + return httpx.Response( + 200, + json={ + "errorCode": 0, + "payload": [ + {"id": "t1", "name": "Routine"}, + {"id": "t2", "name": "LCLS"}, + ], + }, + ) + + +class TestCreateEntry: + async def test_posts_v1_json_with_author_attribution(self): + captured: dict = {} + + def handler(request: httpx.Request) -> httpx.Response: + if request.url.path == "/v1/logbooks": + return _logbooks_directory_response() + if request.url.path == "/v1/logbooks/lb1/tags": + return _tags_directory_response_for_lb1() + assert request.url.path == "/v1/entries" + assert request.method == "POST" + assert request.headers["content-type"].startswith("application/json") + captured["body"] = json.loads(request.content) + return httpx.Response(201, json={"errorCode": 0, "payload": "entry-abc"}) + + adapter = _make_adapter(handler) + # Frontend sends IDs; adapter passes IDs through to v1. 
+ req = ElogEntryRequest( + logbooks=["lb1"], + title="Snapshot: foo", + body_markdown="# hello", + tags=["t1"], + author="ConsoleUser", + snapshot_id="snap-123", + ) + + try: + result = await adapter.create_entry(req) + finally: + await adapter.close() + + assert result.id == "entry-abc" + body = captured["body"] + assert body["logbooks"] == ["lb1"] + assert body["tags"] == ["t1"] + assert body["title"] == "Snapshot: foo" + assert "Posted by ConsoleUser via Squirrel" in body["text"] + assert "