diff --git a/docker-compose.yml b/docker-compose.yml
index ae75ea18d..ec29de645 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -84,13 +84,10 @@ services:
     # cuda / continuum-core-vulkan overlays) it's the actual ceiling.
     mem_limit: ${CONTINUUM_CORE_MEM:-16g}
     working_dir: /app
-    # depends_on does NOT include postgres — postgres is opt-in (profile),
-    # and by default continuum-core uses SQLite where no startup ordering
-    # matters. When users enable the postgres profile and set DATABASE_URL,
-    # Rust's PostgresAdapter (deadpool pool) retries connection on startup.
-    depends_on:
-      livekit-bridge:
-        condition: service_healthy
+    # No depends_on for services behind profiles (postgres, livekit-bridge).
+    # Core starts independently; connections to optional services (postgres
+    # pool, livekit bridge socket) retry on demand. Text chat works without
+    # any profile active — voice/video requires `--profile live`.
     volumes:
       - voice-models:/app/models:ro
     # Mount the ENTIRE ~/.continuum directory R/W. The Rust core reads config,
@@ -130,15 +127,18 @@ services:
   # ── LiveKit Bridge (Rust — WebRTC transport adapter) ──────
   # Links webrtc-sys but NOT ort. Separate process eliminates
   # the protobuf symbol conflict that deadlocked continuum-core.
+  #
+  # Behind `live` profile: voice/video chat is opt-in. Text chat (the
+  # default first-chat experience) doesn't need LiveKit at all. This
+  # saves ~300MB RAM + 3 ports (7880-7882) for Carl's first run.
+  # Enable with: docker compose --profile live up
   livekit-bridge:
+    profiles: [live]
     build:
       context: ./src/workers
       dockerfile: ../../docker/livekit-bridge.Dockerfile
     image: ghcr.io/cambriantech/continuum-livekit-bridge:${CONTINUUM_IMAGE_TAG:-latest}
     restart: unless-stopped
-    # WebRTC encode/decode buffers + multi-stream. Scales with host RAM —
-    # install.sh sets LIVEKIT_BRIDGE_MEM to max(2, host_gb/8). Default 2g
-    # for manual docker compose users; install.sh writes the calculated one.
     mem_limit: ${LIVEKIT_BRIDGE_MEM:-2g}
     depends_on:
       - livekit
@@ -184,7 +184,12 @@ services:
       - NODE_ENV=production
       - JTAG_SKIP_HTTP=1
       - JTAG_NO_TLS=1
-      - LIVEKIT_URL=${LIVEKIT_BROWSER_URL:-ws://livekit:7880}
+      # Browser connects to LiveKit via host-mapped port, not Docker DNS.
+      # 'ws://livekit:7880' only resolves inside the Docker network;
+      # the browser runs on the host where 'livekit' doesn't resolve.
+      # localhost:7880 works because livekit binds that port to the host.
+      # Grid mode overrides via LIVEKIT_BROWSER_URL=ws://tailscale:7880.
+      - LIVEKIT_URL=${LIVEKIT_BROWSER_URL:-ws://localhost:7880}
 
   # ── Widget Server (Vite) ──────────────────────────────────
   widget-server:
@@ -208,10 +213,11 @@ services:
       - JTAG_WS_PROXY_PORT=9001
 
   # ── LiveKit (WebRTC) — local mode ───────────────────────────
-  # Dev server for local development. Always starts.
-  # In grid mode, set LIVEKIT_HOST_PORT=0 in .env to avoid port conflict with tailscale.
-  # (LiveKit still runs but on unmapped ports — harmless, ~50MB RAM.)
+  # Dev server for voice/video. Behind `live` profile — text chat doesn't
+  # need it. In grid mode, set LIVEKIT_HOST_PORT=0 to avoid port conflict.
+  # Enable with: docker compose --profile live up
   livekit:
+    profiles: [live]
     image: livekit/livekit-server:latest
     restart: unless-stopped
     mem_limit: 256m