Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 29 additions & 2 deletions .codex/config.toml
Original file line number Diff line number Diff line change
@@ -1,19 +1,46 @@
model = "gpt-5.4"
model_reasoning_effort = "medium"
model_reasoning_summary = "concise"
model_verbosity = "low"

approval_policy = "on-request"
sandbox_mode = "workspace-write"

[sandbox_workspace_write]
network_access = false

[tools]
web_search = false

[profiles.quick]
model = "gpt-5.4"
model_reasoning_effort = "low"
model_reasoning_summary = "concise"
model_verbosity = "low"
approval_policy = "on-request"
sandbox_mode = "workspace-write"

[profiles.review]
model = "gpt-5.4"
model_reasoning_effort = "high"
plan_mode_reasoning_effort = "high"
model_reasoning_summary = "concise"
model_verbosity = "medium"
approval_policy = "on-request"
sandbox_mode = "workspace-write"

[profiles.quick]
[profiles.readonly]
model = "gpt-5.4"
model_reasoning_effort = "low"
model_reasoning_effort = "medium"
model_reasoning_summary = "concise"
model_verbosity = "low"
approval_policy = "on-request"
sandbox_mode = "read-only"

[profiles.docs]
model = "gpt-5.4"
model_reasoning_effort = "medium"
model_reasoning_summary = "concise"
model_verbosity = "medium"
approval_policy = "on-request"
sandbox_mode = "workspace-write"
12 changes: 12 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,17 @@ Home server
Backend + DB
```

## Observability

Human Engine includes a minimal observability layer for backend logs:

- structured JSON logging in the FastAPI backend
- Promtail pipeline with docker log parsing and JSON extraction
- Loki for log storage
- Grafana dashboard for API requests, HealthKit sync, readiness recompute, errors, and pipeline trace

See: [docs/architecture/OBSERVABILITY.md](docs/architecture/OBSERVABILITY.md)

## Architecture Principles

- Simplicity over complexity
Expand All @@ -146,6 +157,7 @@ docs/ system documentation

- [backend/README.md](backend/README.md)
- [docs/architecture/ARCHITECTURE.md](docs/architecture/ARCHITECTURE.md)
- [docs/architecture/OBSERVABILITY.md](docs/architecture/OBSERVABILITY.md)
- [backend/ROADMAP.md](backend/ROADMAP.md)
- [docs/models/model_v2_architecture.md](docs/models/model_v2_architecture.md)
- [docs/product/CURRENT_STATE.md](docs/product/CURRENT_STATE.md)
Expand Down
159 changes: 156 additions & 3 deletions backend/backend/app.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,20 @@
from __future__ import annotations

import json
import logging
import time
import uuid
from datetime import datetime
from typing import Any

import psycopg
import requests
from fastapi import FastAPI, HTTPException, Query, Request
from fastapi.responses import RedirectResponse
from fastapi.responses import JSONResponse, RedirectResponse
from pydantic import BaseModel, Field

from backend.config import settings
from backend.core.logging import configure_logging, log_event
from backend.db import get_conn
from backend.services.fitness_service import recompute_fitness_state
from backend.services.ingest_service import process_one_strava_ingest_job
Expand Down Expand Up @@ -39,6 +43,9 @@
from backend.services.healthkit_pipeline import ingest_and_process_healthkit_payload
from backend.services.readiness_query import get_readiness_daily_for_date

configure_logging()
logger = logging.getLogger(__name__)

app = FastAPI(title="Human Engine API", version="0.1.0")


Expand Down Expand Up @@ -83,6 +90,106 @@ class AIRideBriefingResponse(BaseModel):
data: AIRideBriefingData


def _get_request_id(request: Request) -> str:
request_id = getattr(request.state, "request_id", None)
if request_id:
return request_id

request_id = request.headers.get("x-request-id") or str(uuid.uuid4())
request.state.request_id = request_id
return request_id


def _extract_user_id(request: Request) -> str | None:
path_user_id = request.path_params.get("user_id")
if path_user_id:
return str(path_user_id)

header_user_id = request.headers.get("x-user-id")
if header_user_id:
return header_user_id

return None


@app.middleware("http")
async def request_logging_middleware(request: Request, call_next):
    # Wrap every HTTP request in structured start/finish/error log events,
    # correlated by a per-request id and timed with a monotonic clock.
    started_at = time.perf_counter()
    request_id = _get_request_id(request)
    # NOTE(review): routing has not run yet here, so path_params is presumably
    # empty and this first extraction can only see the X-User-ID header.
    user_id = _extract_user_id(request)
    request.state.user_id = user_id

    log_event(
        logger,
        "api_request_started",
        method=request.method,
        path=request.url.path,
        request_id=request_id,
        user_id=user_id,
    )

    try:
        response = await call_next(request)
    except Exception as exc:
        # Exception path: emit a single "error" event with duration, then
        # re-raise so the app-level exception handler produces the response.
        # No "api_request_finished" event is logged for this request.
        duration_ms = round((time.perf_counter() - started_at) * 1000, 2)
        log_event(
            logger,
            "error",
            level=logging.ERROR,
            error_type=type(exc).__name__,
            error=str(exc),
            context="request_exception",
            method=request.method,
            path=request.url.path,
            request_id=request_id,
            # Re-extract: path_params may have been populated during routing.
            user_id=_extract_user_id(request),
            duration_ms=duration_ms,
        )
        raise

    duration_ms = round((time.perf_counter() - started_at) * 1000, 2)
    # Re-extract the user id now that routing has filled in path_params,
    # and refresh request.state for anything downstream that reads it.
    user_id = _extract_user_id(request)
    request.state.user_id = user_id
    # Echo the correlation id back to the client on every response.
    response.headers["X-Request-ID"] = request_id

    log_event(
        logger,
        "api_request_finished",
        method=request.method,
        path=request.url.path,
        status_code=response.status_code,
        duration_ms=duration_ms,
        request_id=request_id,
        user_id=user_id,
    )

    # Any 4xx/5xx response additionally emits an "error" event, even though
    # no exception was raised (e.g. HTTPException already converted to a
    # response by the framework).
    if response.status_code >= 400:
        log_event(
            logger,
            "error",
            level=logging.ERROR,
            error_type="HTTPError",
            error=f"request failed with status {response.status_code}",
            context="http_response",
            method=request.method,
            path=request.url.path,
            status_code=response.status_code,
            request_id=request_id,
            user_id=user_id,
        )

    return response


@app.exception_handler(Exception)
async def unhandled_exception_handler(request: Request, exc: Exception):
    """Catch-all 500 handler: expose only the exception class name to the
    client and echo the correlation id so the failure can be found in logs."""
    body = {"detail": f"internal server error: {type(exc).__name__}"}
    correlation = {"X-Request-ID": _get_request_id(request)}
    return JSONResponse(status_code=500, content=body, headers=correlation)


@app.get("/healthz")
def healthz():
    """Liveness probe: constant payload, touches no dependencies."""
    return {"ok": True}
Expand Down Expand Up @@ -267,6 +374,17 @@ async def strava_webhook_receive(request: Request):
if not object_type or object_id is None or owner_id is None or event_time_unix is None:
raise HTTPException(status_code=400, detail="invalid strava webhook payload")

log_event(
logger,
"strava_webhook_received",
request_id=_get_request_id(request),
owner_id=owner_id,
object_id=object_id,
object_type=object_type,
aspect_type=aspect_type,
subscription_id=subscription_id,
)

dedupe_key = (
f"strava:{subscription_id}:{owner_id}:"
f"{object_type}:{object_id}:{aspect_type}:{event_time_unix}"
Expand Down Expand Up @@ -356,6 +474,21 @@ async def strava_webhook_receive(request: Request):
f"webhook_{aspect_type}",
),
)
job_row = cur.fetchone()
job_id = job_row[0] if job_row else None

if job_id is not None:
log_event(
logger,
"strava_ingest_job_created",
request_id=_get_request_id(request),
user_id=user_id,
job_id=job_id,
owner_id=owner_id,
activity_id=object_id,
aspect_type=aspect_type,
webhook_event_id=webhook_event_id,
)

conn.commit()

Expand Down Expand Up @@ -1217,12 +1350,32 @@ def ingest_and_process_healthkit_payload_endpoint(user_id: str, payload: HealthS

@app.post("/api/v1/healthkit/full-sync/{user_id}")
def full_sync_healthkit_payload_endpoint(user_id: str, payload: HealthSyncPayload):
started_at = time.perf_counter()
counts = {
"sleep": len(payload.sleepNights),
"hrv": len(payload.hrvSamples),
"rhr": len(payload.restingHeartRateDaily),
}

log_event(
logger,
"healthkit_full_sync_started",
user_id=user_id,
counts=counts,
)

try:
result = ingest_and_process_healthkit_payload(
user_id=user_id,
payload=payload,
)
print("FULL_SYNC_RESULT:", result)
log_event(
logger,
"healthkit_full_sync_finished",
user_id=user_id,
counts=counts,
duration_ms=round((time.perf_counter() - started_at) * 1000, 2),
)
return result
except Exception as e:
raise HTTPException(
Expand All @@ -1236,4 +1389,4 @@ def get_readiness_daily_endpoint(user_id: str, target_date: str):
return get_readiness_daily_for_date(
user_id=user_id,
target_date=target_date,
)
)
1 change: 1 addition & 0 deletions backend/backend/core/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
"""Core backend utilities."""
76 changes: 76 additions & 0 deletions backend/backend/core/logging.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
from __future__ import annotations

import json
import logging
import sys
from datetime import datetime, timezone
from typing import Any


SERVICE_NAME = "human-engine-backend"
_RESERVED_RECORD_FIELDS = set(logging.makeLogRecord({}).__dict__.keys())


def _json_default(value: Any) -> Any:
if isinstance(value, datetime):
return value.astimezone(timezone.utc).isoformat().replace("+00:00", "Z")
return str(value)


class JsonFormatter(logging.Formatter):
    """Render each log record as one JSON object per line.

    Emits timestamp/level/service, then optional ``event`` and ``message``,
    then any non-reserved, non-None extras attached to the record, and
    finally a formatted traceback when exc_info is present.
    """

    def format(self, record: logging.LogRecord) -> str:
        stamp = (
            datetime.fromtimestamp(record.created, tz=timezone.utc)
            .isoformat(timespec="milliseconds")
            .replace("+00:00", "Z")
        )
        doc: dict[str, Any] = {
            "timestamp": stamp,
            "level": record.levelname,
            "service": getattr(record, "service", SERVICE_NAME),
        }

        event_name = getattr(record, "event", None)
        if event_name:
            doc["event"] = event_name

        rendered = record.getMessage()
        if rendered:
            doc["message"] = rendered

        # Merge caller-supplied extras, skipping stdlib record attributes,
        # private names, None values, and keys already emitted above.
        for attr, val in record.__dict__.items():
            if attr.startswith("_") or attr in _RESERVED_RECORD_FIELDS:
                continue
            if val is None or attr in doc:
                continue
            doc[attr] = val

        if record.exc_info:
            doc["exception"] = self.formatException(record.exc_info)

        return json.dumps(doc, ensure_ascii=False, default=_json_default)


def configure_logging(level: int = logging.INFO) -> None:
    """Install a single JSON handler on stdout as the only root handler.

    Uvicorn's own loggers are stripped of their handlers and set to
    propagate, so all framework logs flow through the same JSON pipeline.
    """
    stream_handler = logging.StreamHandler(stream=sys.stdout)
    stream_handler.setFormatter(JsonFormatter())

    root = logging.getLogger()
    root.setLevel(level)
    root.handlers.clear()
    root.addHandler(stream_handler)

    for name in ["uvicorn", "uvicorn.access", "uvicorn.error"]:
        framework_logger = logging.getLogger(name)
        framework_logger.setLevel(level)
        framework_logger.handlers.clear()
        framework_logger.propagate = True


def log_event(logger: logging.Logger, event: str, **kwargs: Any) -> None:
    """Emit a structured event record on *logger*.

    The ``level`` (default INFO) and ``message`` (default empty) kwargs are
    consumed rather than attached as fields; every remaining non-None kwarg
    becomes an extra field on the record, alongside ``service`` and ``event``.
    """
    lvl = kwargs.pop("level", logging.INFO)
    msg = kwargs.pop("message", "")

    fields: dict[str, Any] = {"service": SERVICE_NAME, "event": event}
    for key, value in kwargs.items():
        if value is not None:
            fields[key] = value

    logger.log(lvl, msg, extra=fields)
Loading
Loading