diff --git a/.gcloudignore b/.gcloudignore new file mode 100644 index 00000000..b924691d --- /dev/null +++ b/.gcloudignore @@ -0,0 +1,14 @@ +** +!Dockerfile.ghl +!cloudbuild.ghl.yaml +!Makefile.cbm +!REPOS.yaml +!REPOS.local.yaml +!src +!src/** +!internal +!internal/** +!vendored +!vendored/** +!ghl +!ghl/** diff --git a/Dockerfile.ghl b/Dockerfile.ghl new file mode 100644 index 00000000..34708034 --- /dev/null +++ b/Dockerfile.ghl @@ -0,0 +1,90 @@ +# Dockerfile.ghl — GHL fleet server +# +# Multi-stage build: +# stage 1 (cbm): compile codebase-memory-mcp from source for linux/amd64 +# stage 2 (build): compile the Go fleet server +# stage 3 (run): minimal runtime image + +# ── Stage 1: codebase-memory-mcp binary ────────────────────────── +FROM debian:12-slim AS cbm + +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + ca-certificates \ + git \ + pkg-config \ + zlib1g-dev \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /src + +COPY Makefile.cbm ./ +COPY src/ ./src/ +COPY internal/ ./internal/ +COPY vendored/ ./vendored/ + +RUN make -f Makefile.cbm cbm && \ + install -m 0755 build/c/codebase-memory-mcp /usr/local/bin/codebase-memory-mcp + +# ── Stage 2: Go fleet server ────────────────────────────────────── +FROM golang:1.25-alpine AS build + +WORKDIR /src + +# Cache dependencies first +COPY ghl/go.mod ghl/go.sum ./ +RUN go mod download + +# Copy source +COPY ghl/ ./ + +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \ + go build -trimpath -ldflags="-s -w" \ + -o /app/ghl-fleet ./cmd/server + +# ── Stage 3: Runtime ────────────────────────────────────────────── +# Use debian-slim (not distroless) so git is available for repo cloning +FROM debian:12-slim + +RUN apt-get update && apt-get install -y --no-install-recommends \ + git \ + ca-certificates \ + openssh-client \ + && rm -rf /var/lib/apt/lists/* + +# Copy binaries +COPY --from=cbm /usr/local/bin/codebase-memory-mcp /app/codebase-memory-mcp +COPY --from=build /app/ghl-fleet 
/app/ghl-fleet + +# Copy default manifest +COPY REPOS.yaml /app/REPOS.yaml +COPY REPOS.local.yaml /app/REPOS.local.yaml + +# Git: trust all dirs (needed when running as non-root in containers) +RUN git config --global --add safe.directory '*' + +WORKDIR /app + +# ── Defaults (all overridable via env) ─────────────────────────── +ENV PORT=8080 \ + CBM_MMAP_SIZE=0 \ + CBM_BINARY=/app/codebase-memory-mcp \ + CBM_CACHE_DIR=/tmp/codebase-memory-mcp \ + CBM_ARTIFACT_DIR=/data/fleet-cache/indexes \ + FLEET_CACHE_DIR=/tmp/fleet-repos \ + PROJECT_NAME_PREFIX=data-fleet-cache-repos \ + REPOS_MANIFEST=/app/REPOS.local.yaml \ + BRIDGE_CLIENTS=8 \ + BRIDGE_ACQUIRE_TIMEOUT_MS=3000 \ + FLEET_CONCURRENCY=8 \ + INDEXER_CLIENTS=8 \ + STARTUP_INDEX_ENABLED=false \ + SCHEDULED_INDEXING_ENABLED=false \ + CRON_INCREMENTAL="0 */6 * * *" \ + CRON_FULL="0 2 * * 0" + +EXPOSE 8080 + +VOLUME ["/data/fleet-cache"] + +ENTRYPOINT ["/app/ghl-fleet"] diff --git a/REPOS.local.yaml b/REPOS.local.yaml new file mode 100644 index 00000000..bbfd9eee --- /dev/null +++ b/REPOS.local.yaml @@ -0,0 +1,236 @@ +# REPOS.local.yaml — generated local fleet manifest +# workspace_root: /Users/himanshuranjan/Documents/highlevel +# source_manifest: ../REPOS.yaml +# Regenerate from ./ghl with: go run ./cmd/genlocalmanifest +repos: + - name: clientportal-core + github_url: https://github.com/GoHighLevel/clientportal-core.git + team: platform + type: library + tags: + - vue + - vue3 + - platform + - name: ghl-agentic-workspace + github_url: https://github.com/GoHighLevel/ghl-agentic-workspace.git + team: platform + type: service + tags: + - typescript + - nestjs + - platform + - name: ghl-awesome-studio + github_url: https://github.com/GoHighLevel/ghl-awesome-studio.git + team: platform + type: frontend + tags: + - vue + - vue3 + - platform + - name: ghls-pr + github_url: https://github.com/GoHighLevel/ghls-pr.git + team: platform + type: service + tags: + - typescript + - nestjs + - platform + - name: i18n-analysis + 
github_url: https://github.com/GoHighLevel/i18n-analysis.git + team: platform + type: service + tags: + - javascript + - nestjs + - platform + - name: image-processing-service + github_url: https://github.com/GoHighLevel/image-processing-service.git + team: platform + type: service + tags: + - go + - platform + - name: infrastructure-as-a-code + github_url: https://github.com/GoHighLevel/infrastructure-as-a-code.git + team: platform + type: infra + tags: + - hcl + - platform + - name: MoltClaw-by-HighLevel + github_url: https://github.com/GoHighLevel/MoltClaw-by-HighLevel.git + team: platform + type: service + tags: + - typescript + - nestjs + - platform + - name: platform-backend + github_url: https://github.com/GoHighLevel/platform-backend.git + team: platform + type: service + tags: + - typescript + - nestjs + - platform + - name: platform-core + github_url: https://github.com/GoHighLevel/platform-core.git + team: platform + type: library + tags: + - typescript + - platform + - name: platform-devtools-backend + github_url: https://github.com/GoHighLevel/platform-devtools-backend.git + team: platform + type: service + tags: + - typescript + - nestjs + - platform + - name: platform-devtools-frontend + github_url: https://github.com/GoHighLevel/platform-devtools-frontend.git + team: platform + type: frontend + tags: + - typescript + - platform + - name: platform-docs + github_url: https://github.com/GoHighLevel/platform-docs.git + team: platform + type: docs + tags: + - html + - platform + - name: platform-jenkins-shared-library + github_url: https://github.com/GoHighLevel/platform-jenkins-shared-library.git + team: platform + type: library + tags: + - groovy + - platform + - name: project-orion + github_url: https://github.com/GoHighLevel/project-orion.git + team: platform + type: other + tags: + - html + - platform + - name: quality-gates + github_url: https://github.com/GoHighLevel/quality-gates.git + team: platform + type: service + tags: + - typescript + - 
nestjs + - platform + - name: automation-am-client-portal + github_url: https://github.com/GoHighLevel/automation-am-client-portal.git + team: revex + type: frontend + tags: + - vue + - vue3 + - revex + - name: ghl-membership-frontend + github_url: https://github.com/GoHighLevel/ghl-membership-frontend.git + team: revex + type: frontend + tags: + - typescript + - revex + - name: ghl-revex-backend + github_url: https://github.com/GoHighLevel/ghl-revex-backend.git + team: revex + type: service + tags: + - typescript + - nestjs + - revex + - name: ghl-revex-frontend + github_url: https://github.com/GoHighLevel/ghl-revex-frontend.git + team: revex + type: frontend + tags: + - vue + - vue3 + - revex + - name: membership-backend + github_url: https://github.com/GoHighLevel/membership-backend.git + team: revex + type: service + tags: + - typescript + - nestjs + - revex + - name: membership-hmi-app + github_url: https://github.com/GoHighLevel/membership-hmi-app.git + team: revex + type: frontend + tags: + - vue + - vue3 + - revex + - name: membership-hmi-preview + github_url: https://github.com/GoHighLevel/membership-hmi-preview.git + team: revex + type: frontend + tags: + - vue + - vue3 + - revex + - name: ghl-crm-frontend + github_url: https://github.com/GoHighLevel/ghl-crm-frontend.git + team: crm + type: frontend + tags: + - vue + - vue3 + - crm + - name: ghl-email-builder + github_url: https://github.com/GoHighLevel/ghl-email-builder.git + team: conversations + type: frontend + tags: + - vue + - vue3 + - conversations + - name: spm-ts + github_url: https://github.com/GoHighLevel/spm-ts.git + team: funnels + type: frontend + tags: + - vue + - vue3 + - funnels + - name: automation-workflows-frontend + github_url: https://github.com/GoHighLevel/automation-workflows-frontend.git + team: marketing + type: frontend + tags: + - typescript + - marketing + - name: marketplace-backend + github_url: https://github.com/GoHighLevel/marketplace-backend.git + team: saas + type: 
service + tags: + - typescript + - nestjs + - saas + - name: ai-backend + github_url: https://github.com/GoHighLevel/ai-backend.git + team: ai + type: service + tags: + - typescript + - nestjs + - ai + - name: ai-frontend + github_url: https://github.com/GoHighLevel/ai-frontend.git + team: ai + type: frontend + tags: + - vue + - vue3 + - ai diff --git a/REPOS.yaml b/REPOS.yaml new file mode 100644 index 00000000..640fd1be --- /dev/null +++ b/REPOS.yaml @@ -0,0 +1,2897 @@ +# GHL Fleet Manifest — auto-generated from GoHighLevel GitHub org +# DO NOT EDIT MANUALLY — regenerate with: scripts/generate-repos-manifest.sh +# Total active repos: 480 (archived repos excluded) + +repos: + # ──────────────────── PLATFORM ────────────────────── + - name: a11y-injector + github_url: https://github.com/GoHighLevel/a11y-injector.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: api-documentation + github_url: https://github.com/GoHighLevel/api-documentation.git + team: platform + type: docs + tags: [typescript, platform] + + - name: api-framework + github_url: https://github.com/GoHighLevel/api-framework.git + team: platform + type: library + tags: [typescript, platform] + + - name: api-gateway + github_url: https://github.com/GoHighLevel/api-gateway.git + team: platform + type: service + tags: [csharp, platform] + + - name: ARTS + github_url: https://github.com/GoHighLevel/ARTS.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: backstage + github_url: https://github.com/GoHighLevel/backstage.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: branch-test-repo + github_url: https://github.com/GoHighLevel/branch-test-repo.git + team: platform + type: tests + tags: [testing, platform] + + - name: bugzy-lab + github_url: https://github.com/GoHighLevel/bugzy-lab.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: Build-settings + github_url: 
https://github.com/GoHighLevel/Build-settings.git + team: platform + type: other + tags: [lua, platform] + + - name: canary-flow + github_url: https://github.com/GoHighLevel/canary-flow.git + team: platform + type: other + tags: [platform] + + - name: cbr + github_url: https://github.com/GoHighLevel/cbr.git + team: platform + type: other + tags: [platform] + + - name: clientportal-core + github_url: https://github.com/GoHighLevel/clientportal-core.git + team: platform + type: library + tags: [vue, vue3, platform] + + - name: cloud-functions + github_url: https://github.com/GoHighLevel/cloud-functions.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: code-coverage + github_url: https://github.com/GoHighLevel/code-coverage.git + team: platform + type: other + tags: [platform] + + - name: colorcounter + github_url: https://github.com/GoHighLevel/colorcounter.git + team: platform + type: other + tags: [dart, platform] + + - name: context-layer + github_url: https://github.com/GoHighLevel/context-layer.git + team: platform + type: service + tags: [python, platform] + + - name: Continuum + github_url: https://github.com/GoHighLevel/Continuum.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: critical-endpoints-servers + github_url: https://github.com/GoHighLevel/critical-endpoints-servers.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: crud-test + github_url: https://github.com/GoHighLevel/crud-test.git + team: platform + type: tests + tags: [typescript, testing, platform] + + - name: csv-xls-exporter + github_url: https://github.com/GoHighLevel/csv-xls-exporter.git + team: platform + type: other + tags: [platform] + + - name: custom-widgets-price-banner + github_url: https://github.com/GoHighLevel/custom-widgets-price-banner.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: Customer_Success_Transcription_App_V2 + github_url: 
https://github.com/GoHighLevel/Customer_Success_Transcription_App_V2.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: Customer_Support_Transcription_App_V2 + github_url: https://github.com/GoHighLevel/Customer_Support_Transcription_App_V2.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: debounce-service + github_url: https://github.com/GoHighLevel/debounce-service.git + team: platform + type: service + tags: [python, platform] + + - name: deployment-bot + github_url: https://github.com/GoHighLevel/deployment-bot.git + team: platform + type: infra + tags: [shell, platform] + + - name: dev-charon + github_url: https://github.com/GoHighLevel/dev-charon.git + team: platform + type: service + tags: [go, platform] + + - name: dev-charon-assets-viewer + github_url: https://github.com/GoHighLevel/dev-charon-assets-viewer.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: dev-commerce-applications + github_url: https://github.com/GoHighLevel/dev-commerce-applications.git + team: platform + type: frontend + tags: [go, platform] + + - name: dev-commerce-documentx + github_url: https://github.com/GoHighLevel/dev-commerce-documentx.git + team: platform + type: service + tags: [go, platform] + + - name: dev-commerce-engine + github_url: https://github.com/GoHighLevel/dev-commerce-engine.git + team: platform + type: service + tags: [go, platform] + + - name: dev-commerce-frontend + github_url: https://github.com/GoHighLevel/dev-commerce-frontend.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: dev-commerce-img-optimiser + github_url: https://github.com/GoHighLevel/dev-commerce-img-optimiser.git + team: platform + type: other + tags: [c, platform] + + - name: dev-commerce-ledgerx + github_url: https://github.com/GoHighLevel/dev-commerce-ledgerx.git + team: platform + type: service + tags: [go, platform] + + - name: dev-commerce-merchantx 
+ github_url: https://github.com/GoHighLevel/dev-commerce-merchantx.git + team: platform + type: service + tags: [go, platform] + + - name: dev-commerce-ppc + github_url: https://github.com/GoHighLevel/dev-commerce-ppc.git + team: platform + type: service + tags: [go, platform] + + - name: dev-commerce-proto + github_url: https://github.com/GoHighLevel/dev-commerce-proto.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: dev-commerce-transaction-forensics + github_url: https://github.com/GoHighLevel/dev-commerce-transaction-forensics.git + team: platform + type: service + tags: [go, platform] + + - name: dev-conventions + github_url: https://github.com/GoHighLevel/dev-conventions.git + team: platform + type: other + tags: [platform] + + - name: dev-cursor-agents-manager + github_url: https://github.com/GoHighLevel/dev-cursor-agents-manager.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: dev-docs + github_url: https://github.com/GoHighLevel/dev-docs.git + team: platform + type: docs + tags: [platform] + + - name: dev-mobcom-fsb-dashboard + github_url: https://github.com/GoHighLevel/dev-mobcom-fsb-dashboard.git + team: platform + type: frontend + tags: [go, platform] + + - name: DevCapture + github_url: https://github.com/GoHighLevel/DevCapture.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: devlab-internal + github_url: https://github.com/GoHighLevel/devlab-internal.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: disassemble-batch + github_url: https://github.com/GoHighLevel/disassemble-batch.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: docker-nginx-auto-ssl + github_url: https://github.com/GoHighLevel/docker-nginx-auto-ssl.git + team: platform + type: infra + tags: [shell, platform] + + - name: document-chrome-extension + github_url: 
https://github.com/GoHighLevel/document-chrome-extension.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: documents-contracts-rich-text-mvp + github_url: https://github.com/GoHighLevel/documents-contracts-rich-text-mvp.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: electron-push-receiver + github_url: https://github.com/GoHighLevel/electron-push-receiver.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: email-builder-service + github_url: https://github.com/GoHighLevel/email-builder-service.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: email-builder-tools + github_url: https://github.com/GoHighLevel/email-builder-tools.git + team: platform + type: tooling + tags: [javascript, platform] + + - name: engram + github_url: https://github.com/GoHighLevel/engram.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ent-reports + github_url: https://github.com/GoHighLevel/ent-reports.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: events-backend + github_url: https://github.com/GoHighLevel/events-backend.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: events-frontend + github_url: https://github.com/GoHighLevel/events-frontend.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: fd-test + github_url: https://github.com/GoHighLevel/fd-test.git + team: platform + type: tests + tags: [vue, vue3, testing, platform] + + - name: figma-importer-plugin + github_url: https://github.com/GoHighLevel/figma-importer-plugin.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: FigmaJSONtoComponent + github_url: https://github.com/GoHighLevel/FigmaJSONtoComponent.git + team: platform + type: other + tags: [platform] + + - name: firestore-rules + github_url: 
https://github.com/GoHighLevel/firestore-rules.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: flutter-ffmpeg-kit + github_url: https://github.com/GoHighLevel/flutter-ffmpeg-kit.git + team: platform + type: other + tags: [c, platform] + + - name: flutter-layrkit + github_url: https://github.com/GoHighLevel/flutter-layrkit.git + team: platform + type: other + tags: [dart, platform] + + - name: flutter-official-packages + github_url: https://github.com/GoHighLevel/flutter-official-packages.git + team: platform + type: library + tags: [platform] + + - name: flutter_html + github_url: https://github.com/GoHighLevel/flutter_html.git + team: platform + type: other + tags: [dart, platform] + + - name: flutter_icon54 + github_url: https://github.com/GoHighLevel/flutter_icon54.git + team: platform + type: other + tags: [dart, platform] + + - name: flutter_launcher_icons + github_url: https://github.com/GoHighLevel/flutter_launcher_icons.git + team: platform + type: other + tags: [dart, platform] + + - name: flutter_native_splash + github_url: https://github.com/GoHighLevel/flutter_native_splash.git + team: platform + type: other + tags: [platform] + + - name: flutter_untitled_ui_icons + github_url: https://github.com/GoHighLevel/flutter_untitled_ui_icons.git + team: platform + type: other + tags: [dart, platform] + + - name: freshdesk-indexer-ts + github_url: https://github.com/GoHighLevel/freshdesk-indexer-ts.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: freshdesk-indexer-ts-v2 + github_url: https://github.com/GoHighLevel/freshdesk-indexer-ts-v2.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: frontend-codemods + github_url: https://github.com/GoHighLevel/frontend-codemods.git + team: platform + type: other + tags: [platform] + + - name: frontend-debugger + github_url: https://github.com/GoHighLevel/frontend-debugger.git + team: platform + type: service 
+ tags: [javascript, nestjs, platform] + + - name: frontend-memory-leaks + github_url: https://github.com/GoHighLevel/frontend-memory-leaks.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: frontend-performance-utils + github_url: https://github.com/GoHighLevel/frontend-performance-utils.git + team: platform + type: library + tags: [typescript, platform] + + - name: frontend-utils + github_url: https://github.com/GoHighLevel/frontend-utils.git + team: platform + type: library + tags: [platform] + + - name: ghl-agentic-workspace + github_url: https://github.com/GoHighLevel/ghl-agentic-workspace.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-api-collection + github_url: https://github.com/GoHighLevel/ghl-api-collection.git + team: platform + type: service + tags: [platform] + + - name: ghl-auth3 + github_url: https://github.com/GoHighLevel/ghl-auth3.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-awesome-onboarding + github_url: https://github.com/GoHighLevel/ghl-awesome-onboarding.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-awesome-studio + github_url: https://github.com/GoHighLevel/ghl-awesome-studio.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-backend-repo-template + github_url: https://github.com/GoHighLevel/ghl-backend-repo-template.git + team: platform + type: service + tags: [dockerfile, platform] + + - name: ghl-brand-boards + github_url: https://github.com/GoHighLevel/ghl-brand-boards.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-browser-mcp + github_url: https://github.com/GoHighLevel/ghl-browser-mcp.git + team: platform + type: service + tags: [javascript, nestjs, mcp, platform] + + - name: ghl-bulk-request + github_url: https://github.com/GoHighLevel/ghl-bulk-request.git + team: platform + type: service + tags: 
[typescript, nestjs, platform] + + - name: ghl-codebase-mcp + github_url: https://github.com/GoHighLevel/ghl-codebase-mcp.git + team: platform + type: library + tags: [go, mcp, platform] + + - name: ghl-context-builder + github_url: https://github.com/GoHighLevel/ghl-context-builder.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: ghl-ctk-date-time-picker + github_url: https://github.com/GoHighLevel/ghl-ctk-date-time-picker.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-cursor-rules + github_url: https://github.com/GoHighLevel/ghl-cursor-rules.git + team: platform + type: other + tags: [platform] + + - name: ghl-cursor-skills + github_url: https://github.com/GoHighLevel/ghl-cursor-skills.git + team: platform + type: other + tags: [platform] + + - name: ghl-cursor-skills-mcp + github_url: https://github.com/GoHighLevel/ghl-cursor-skills-mcp.git + team: platform + type: service + tags: [typescript, nestjs, mcp, platform] + + - name: GHL-Design-Memory + github_url: https://github.com/GoHighLevel/GHL-Design-Memory.git + team: platform + type: service + tags: [python, platform] + + - name: ghl-desktop-app + github_url: https://github.com/GoHighLevel/ghl-desktop-app.git + team: platform + type: frontend + tags: [typescript, platform] + + - name: ghl-docs-hub + github_url: https://github.com/GoHighLevel/ghl-docs-hub.git + team: platform + type: docs + tags: [typescript, platform] + + - name: ghl-electron-desktop-apps-test + github_url: https://github.com/GoHighLevel/ghl-electron-desktop-apps-test.git + team: platform + type: frontend + tags: [testing, platform] + + - name: ghl-external-tracking + github_url: https://github.com/GoHighLevel/ghl-external-tracking.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-federation-dashboard + github_url: https://github.com/GoHighLevel/ghl-federation-dashboard.git + team: platform + type: frontend + tags: [vue, vue3, 
platform] + + - name: ghl-github-pr-dashboard + github_url: https://github.com/GoHighLevel/ghl-github-pr-dashboard.git + team: platform + type: frontend + tags: [javascript, platform] + + - name: ghl-helm-charts + github_url: https://github.com/GoHighLevel/ghl-helm-charts.git + team: platform + type: infra + tags: [smarty, platform] + + - name: ghl-i18n-feedback + github_url: https://github.com/GoHighLevel/ghl-i18n-feedback.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-icons + github_url: https://github.com/GoHighLevel/ghl-icons.git + team: platform + type: other + tags: [shell, platform] + + - name: ghl-image-py + github_url: https://github.com/GoHighLevel/ghl-image-py.git + team: platform + type: service + tags: [python, platform] + + - name: ghl-isv-app + github_url: https://github.com/GoHighLevel/ghl-isv-app.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-kollab-ci-certificates + github_url: https://github.com/GoHighLevel/ghl-kollab-ci-certificates.git + team: platform + type: other + tags: [platform] + + - name: ghl-leadgen-countdowntimer + github_url: https://github.com/GoHighLevel/ghl-leadgen-countdowntimer.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-leadgen-frontend + github_url: https://github.com/GoHighLevel/ghl-leadgen-frontend.git + team: platform + type: frontend + tags: [platform] + + - name: ghl-liquibase + github_url: https://github.com/GoHighLevel/ghl-liquibase.git + team: platform + type: other + tags: [shell, platform] + + - name: ghl-localisation-v2 + github_url: https://github.com/GoHighLevel/ghl-localisation-v2.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-localization + github_url: https://github.com/GoHighLevel/ghl-localization.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-magic-studio + github_url: 
https://github.com/GoHighLevel/ghl-magic-studio.git + team: platform + type: other + tags: [dockerfile, platform] + + - name: ghl-manifest-viewer + github_url: https://github.com/GoHighLevel/ghl-manifest-viewer.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: ghl-mcp-server + github_url: https://github.com/GoHighLevel/ghl-mcp-server.git + team: platform + type: service + tags: [typescript, nestjs, mcp, platform] + + - name: ghl-media-center + github_url: https://github.com/GoHighLevel/ghl-media-center.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-mobile-app-customiser + github_url: https://github.com/GoHighLevel/ghl-mobile-app-customiser.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-mobile-ci-certificates + github_url: https://github.com/GoHighLevel/ghl-mobile-ci-certificates.git + team: platform + type: other + tags: [platform] + + - name: ghl-module-federation-plugin + github_url: https://github.com/GoHighLevel/ghl-module-federation-plugin.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-monorepo-boilerplate + github_url: https://github.com/GoHighLevel/ghl-monorepo-boilerplate.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-moz-header + github_url: https://github.com/GoHighLevel/ghl-moz-header.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: ghl-nestjs-boilerplate + github_url: https://github.com/GoHighLevel/ghl-nestjs-boilerplate.git + team: platform + type: other + tags: [platform] + + - name: ghl-ofa + github_url: https://github.com/GoHighLevel/ghl-ofa.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-operations + github_url: https://github.com/GoHighLevel/ghl-operations.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: ghl-pam-logging + 
github_url: https://github.com/GoHighLevel/ghl-pam-logging.git + team: platform + type: other + tags: [platform] + + - name: ghl-pdf-compliance + github_url: https://github.com/GoHighLevel/ghl-pdf-compliance.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-plugins + github_url: https://github.com/GoHighLevel/ghl-plugins.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: ghl-poc + github_url: https://github.com/GoHighLevel/ghl-poc.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-pr-ops + github_url: https://github.com/GoHighLevel/ghl-pr-ops.git + team: platform + type: other + tags: [platform] + + - name: ghl-pr-tracker + github_url: https://github.com/GoHighLevel/ghl-pr-tracker.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-proposals + github_url: https://github.com/GoHighLevel/ghl-proposals.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-public-apis + github_url: https://github.com/GoHighLevel/ghl-public-apis.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-public-library-ssr + github_url: https://github.com/GoHighLevel/ghl-public-library-ssr.git + team: platform + type: library + tags: [vue, vue3, platform] + + - name: ghl-qr-code + github_url: https://github.com/GoHighLevel/ghl-qr-code.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: ghl-qr-server + github_url: https://github.com/GoHighLevel/ghl-qr-server.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-rbac-test-suite + github_url: https://github.com/GoHighLevel/ghl-rbac-test-suite.git + team: platform + type: tests + tags: [typescript, testing, platform] + + - name: ghl-repoatlas + github_url: https://github.com/GoHighLevel/ghl-repoatlas.git + team: platform + type: service + tags: [python, platform] + 
+ - name: ghl-route-registry + github_url: https://github.com/GoHighLevel/ghl-route-registry.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-sdk-examples + github_url: https://github.com/GoHighLevel/ghl-sdk-examples.git + team: platform + type: library + tags: [html, platform] + + - name: ghl-sdk-generator + github_url: https://github.com/GoHighLevel/ghl-sdk-generator.git + team: platform + type: library + tags: [handlebars, platform] + + - name: ghl-seo-app + github_url: https://github.com/GoHighLevel/ghl-seo-app.git + team: platform + type: frontend + tags: [typescript, platform] + + - name: ghl-ssr-boilerplate + github_url: https://github.com/GoHighLevel/ghl-ssr-boilerplate.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-template-library + github_url: https://github.com/GoHighLevel/ghl-template-library.git + team: platform + type: library + tags: [typescript, platform] + + - name: ghl-test-management + github_url: https://github.com/GoHighLevel/ghl-test-management.git + team: platform + type: tests + tags: [testing, platform] + + - name: ghl-test-platform + github_url: https://github.com/GoHighLevel/ghl-test-platform.git + team: platform + type: tests + tags: [vue, vue3, testing, platform] + + - name: ghl-text-editor + github_url: https://github.com/GoHighLevel/ghl-text-editor.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ghl-tourguide + github_url: https://github.com/GoHighLevel/ghl-tourguide.git + team: platform + type: docs + tags: [typescript, platform] + + - name: ghl-ui + github_url: https://github.com/GoHighLevel/ghl-ui.git + team: platform + type: frontend + tags: [typescript, platform] + + - name: ghl-v2-api-docs + github_url: https://github.com/GoHighLevel/ghl-v2-api-docs.git + team: platform + type: service + tags: [platform] + + - name: ghl-widgets + github_url: https://github.com/GoHighLevel/ghl-widgets.git + team: 
platform + type: frontend + tags: [typescript, platform] + + - name: ghl_evalcore + github_url: https://github.com/GoHighLevel/ghl_evalcore.git + team: platform + type: service + tags: [typescript, nestjs, testing, platform] + + - name: ghl_vision_flutter + github_url: https://github.com/GoHighLevel/ghl_vision_flutter.git + team: platform + type: other + tags: [dart, platform] + + - name: ghls-pr + github_url: https://github.com/GoHighLevel/ghls-pr.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: giscus-ghl + github_url: https://github.com/GoHighLevel/giscus-ghl.git + team: platform + type: other + tags: [platform] + + - name: git-jenkins-mcp + github_url: https://github.com/GoHighLevel/git-jenkins-mcp.git + team: platform + type: infra + tags: [typescript, mcp, platform] + + - name: github-actions + github_url: https://github.com/GoHighLevel/github-actions.git + team: platform + type: other + tags: [dockerfile, platform] + + - name: github-digest + github_url: https://github.com/GoHighLevel/github-digest.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: go-platform + github_url: https://github.com/GoHighLevel/go-platform.git + team: platform + type: service + tags: [go, platform] + + - name: go-platform-core + github_url: https://github.com/GoHighLevel/go-platform-core.git + team: platform + type: library + tags: [go, platform] + + - name: GoHighLevel + github_url: https://github.com/GoHighLevel/GoHighLevel.git + team: platform + type: other + tags: [platform] + + - name: grafana-report-generator + github_url: https://github.com/GoHighLevel/grafana-report-generator.git + team: platform + type: tooling + tags: [platform] + + - name: gsd-ghl + github_url: https://github.com/GoHighLevel/gsd-ghl.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: high-rise-flutter-colors + github_url: https://github.com/GoHighLevel/high-rise-flutter-colors.git + team: 
platform + type: other + tags: [dart, platform] + + - name: high_canopy + github_url: https://github.com/GoHighLevel/high_canopy.git + team: platform + type: other + tags: [dart, platform] + + - name: highlevel-api-docs + github_url: https://github.com/GoHighLevel/highlevel-api-docs.git + team: platform + type: service + tags: [platform] + + - name: highlevel-api-php + github_url: https://github.com/GoHighLevel/highlevel-api-php.git + team: platform + type: service + tags: [php, platform] + + - name: highlevel-api-python + github_url: https://github.com/GoHighLevel/highlevel-api-python.git + team: platform + type: service + tags: [python, platform] + + - name: highlevel-api-sdk + github_url: https://github.com/GoHighLevel/highlevel-api-sdk.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: highlevel-api-sdk-private + github_url: https://github.com/GoHighLevel/highlevel-api-sdk-private.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: highlevel-flutter + github_url: https://github.com/GoHighLevel/highlevel-flutter.git + team: platform + type: other + tags: [dart, platform] + + - name: highlevel-functions + github_url: https://github.com/GoHighLevel/highlevel-functions.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: highlevel-functions-temp + github_url: https://github.com/GoHighLevel/highlevel-functions-temp.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: highlevel-functions-utils + github_url: https://github.com/GoHighLevel/highlevel-functions-utils.git + team: platform + type: library + tags: [platform] + + - name: highlevel-functions-v2 + github_url: https://github.com/GoHighLevel/highlevel-functions-v2.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: highlevel-functions-v3 + github_url: https://github.com/GoHighLevel/highlevel-functions-v3.git + team: platform + type: 
service + tags: [typescript, nestjs, platform] + + - name: highlevel-html + github_url: https://github.com/GoHighLevel/highlevel-html.git + team: platform + type: other + tags: [html, platform] + + - name: highlevel-infrastructure + github_url: https://github.com/GoHighLevel/highlevel-infrastructure.git + team: platform + type: infra + tags: [lua, platform] + + - name: highlevel-jenkins-shared-libs + github_url: https://github.com/GoHighLevel/highlevel-jenkins-shared-libs.git + team: platform + type: library + tags: [platform] + + - name: highlevel-scraper + github_url: https://github.com/GoHighLevel/highlevel-scraper.git + team: platform + type: service + tags: [python, platform] + + - name: highlevel.handbook.github.io + github_url: https://github.com/GoHighLevel/highlevel.handbook.github.io.git + team: platform + type: other + tags: [html, platform] + + - name: highrise-figmagic + github_url: https://github.com/GoHighLevel/highrise-figmagic.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: highrise-flutter + github_url: https://github.com/GoHighLevel/highrise-flutter.git + team: platform + type: other + tags: [dart, platform] + + - name: highrise-next + github_url: https://github.com/GoHighLevel/highrise-next.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: highrise-nuxt-v3-v4 + github_url: https://github.com/GoHighLevel/highrise-nuxt-v3-v4.git + team: platform + type: frontend + tags: [vue, vue3, nuxt3, platform] + + - name: HighRise-Tokens + github_url: https://github.com/GoHighLevel/HighRise-Tokens.git + team: platform + type: service + tags: [python, platform] + + - name: HighSupply + github_url: https://github.com/GoHighLevel/HighSupply.git + team: platform + type: other + tags: [dart, platform] + + - name: hist + github_url: https://github.com/GoHighLevel/hist.git + team: platform + type: other + tags: [dockerfile, platform] + + - name: hl-base-utils + github_url: 
https://github.com/GoHighLevel/hl-base-utils.git + team: platform + type: library + tags: [typescript, platform] + + - name: hl-test-manager + github_url: https://github.com/GoHighLevel/hl-test-manager.git + team: platform + type: tests + tags: [vue, vue3, testing, platform] + + - name: hl-utils + github_url: https://github.com/GoHighLevel/hl-utils.git + team: platform + type: library + tags: [typescript, platform] + + - name: hubspot-importer + github_url: https://github.com/GoHighLevel/hubspot-importer.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: hubspot-importer-poc + github_url: https://github.com/GoHighLevel/hubspot-importer-poc.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: hugo-book + github_url: https://github.com/GoHighLevel/hugo-book.git + team: platform + type: other + tags: [html, platform] + + - name: I18_Translations_Detection_Plugin + github_url: https://github.com/GoHighLevel/I18_Translations_Detection_Plugin.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: i18n-analysis + github_url: https://github.com/GoHighLevel/i18n-analysis.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: i18n-as-a-service + github_url: https://github.com/GoHighLevel/i18n-as-a-service.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: i18n-test + github_url: https://github.com/GoHighLevel/i18n-test.git + team: platform + type: tests + tags: [vue, vue3, testing, platform] + + - name: i18n-validator + github_url: https://github.com/GoHighLevel/i18n-validator.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: ideas-board-vis-frontend + github_url: https://github.com/GoHighLevel/ideas-board-vis-frontend.git + team: platform + type: frontend + tags: [html, platform] + + - name: image-processing-service + github_url: 
https://github.com/GoHighLevel/image-processing-service.git + team: platform + type: service + tags: [go, platform] + + - name: infra-q2 + github_url: https://github.com/GoHighLevel/infra-q2.git + team: platform + type: other + tags: [platform] + + - name: infrastructure-as-a-code + github_url: https://github.com/GoHighLevel/infrastructure-as-a-code.git + team: platform + type: infra + tags: [hcl, platform] + + - name: instagram-webhook-native-posts + github_url: https://github.com/GoHighLevel/instagram-webhook-native-posts.git + team: platform + type: frontend + tags: [javascript, platform] + + - name: internal-api-documentation + github_url: https://github.com/GoHighLevel/internal-api-documentation.git + team: platform + type: service + tags: [platform] + + - name: internaltools-migrations + github_url: https://github.com/GoHighLevel/internaltools-migrations.git + team: platform + type: tooling + tags: [typescript, platform] + + - name: isv-monitoring-service + github_url: https://github.com/GoHighLevel/isv-monitoring-service.git + team: platform + type: service + tags: [platform] + + - name: Jobber-App-React + github_url: https://github.com/GoHighLevel/Jobber-App-React.git + team: platform + type: frontend + tags: [platform] + + - name: kubernetes-mixin + github_url: https://github.com/GoHighLevel/kubernetes-mixin.git + team: platform + type: other + tags: [platform] + + - name: langflow + github_url: https://github.com/GoHighLevel/langflow.git + team: platform + type: service + tags: [python, platform] + + - name: langfuse + github_url: https://github.com/GoHighLevel/langfuse.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: langfuse-region-migration + github_url: https://github.com/GoHighLevel/langfuse-region-migration.git + team: platform + type: tooling + tags: [python, platform] + + - name: lead-tracker + github_url: https://github.com/GoHighLevel/lead-tracker.git + team: platform + type: service + tags: [typescript, 
nestjs, platform] + + - name: leadgen-ad-publishing-frontend + github_url: https://github.com/GoHighLevel/leadgen-ad-publishing-frontend.git + team: platform + type: frontend + tags: [typescript, platform] + + - name: leadgen-admin + github_url: https://github.com/GoHighLevel/leadgen-admin.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: leadgen-backend + github_url: https://github.com/GoHighLevel/leadgen-backend.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: leadgen-backend-python + github_url: https://github.com/GoHighLevel/leadgen-backend-python.git + team: platform + type: service + tags: [python, platform] + + - name: leadgen-cache-server + github_url: https://github.com/GoHighLevel/leadgen-cache-server.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: leadgen-customer-access-center + github_url: https://github.com/GoHighLevel/leadgen-customer-access-center.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: leadgen-fastpaydirect-static + github_url: https://github.com/GoHighLevel/leadgen-fastpaydirect-static.git + team: platform + type: other + tags: [html, platform] + + - name: leadgen-ipinfo + github_url: https://github.com/GoHighLevel/leadgen-ipinfo.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: leadgen-kaizen-backend + github_url: https://github.com/GoHighLevel/leadgen-kaizen-backend.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: leadgen-loyalty-frontend + github_url: https://github.com/GoHighLevel/leadgen-loyalty-frontend.git + team: platform + type: frontend + tags: [typescript, platform] + + - name: leadgen-store-frontend + github_url: https://github.com/GoHighLevel/leadgen-store-frontend.git + team: platform + type: frontend + tags: [typescript, platform] + + - name: leadgen-tests + github_url: 
https://github.com/GoHighLevel/leadgen-tests.git + team: platform + type: tests + tags: [typescript, testing, platform] + + - name: lighthouse-worker + github_url: https://github.com/GoHighLevel/lighthouse-worker.git + team: platform + type: service + tags: [typescript, nestjs, worker, platform] + + - name: localization-lib + github_url: https://github.com/GoHighLevel/localization-lib.git + team: platform + type: library + tags: [javascript, platform] + + - name: location-prospect + github_url: https://github.com/GoHighLevel/location-prospect.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: logger-rust + github_url: https://github.com/GoHighLevel/logger-rust.git + team: platform + type: service + tags: [rust, platform] + + - name: mail_beam + github_url: https://github.com/GoHighLevel/mail_beam.git + team: platform + type: other + tags: [php, platform] + + - name: manifest + github_url: https://github.com/GoHighLevel/manifest.git + team: platform + type: other + tags: [platform] + + - name: mcpserver-rules + github_url: https://github.com/GoHighLevel/mcpserver-rules.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: mimt-proxy + github_url: https://github.com/GoHighLevel/mimt-proxy.git + team: platform + type: service + tags: [python, platform] + + - name: mobile-backend + github_url: https://github.com/GoHighLevel/mobile-backend.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: mobile-patch-release-dispatch + github_url: https://github.com/GoHighLevel/mobile-patch-release-dispatch.git + team: platform + type: other + tags: [platform] + + - name: mobile-pipeline-auditor + github_url: https://github.com/GoHighLevel/mobile-pipeline-auditor.git + team: platform + type: infra + tags: [go, platform] + + - name: mobile-prds + github_url: https://github.com/GoHighLevel/mobile-prds.git + team: platform + type: other + tags: [css, platform] + + - name: 
mobile-whitelabelcustomizer-dasboard + github_url: https://github.com/GoHighLevel/mobile-whitelabelcustomizer-dasboard.git + team: platform + type: other + tags: [dart, platform] + + - name: mobile_native_app_theme + github_url: https://github.com/GoHighLevel/mobile_native_app_theme.git + team: platform + type: other + tags: [dart, platform] + + - name: Module-Federated-Code-generator + github_url: https://github.com/GoHighLevel/Module-Federated-Code-generator.git + team: platform + type: tooling + tags: [javascript, platform] + + - name: MoltClaw-by-HighLevel + github_url: https://github.com/GoHighLevel/MoltClaw-by-HighLevel.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: naive-ui + github_url: https://github.com/GoHighLevel/naive-ui.git + team: platform + type: frontend + tags: [typescript, platform] + + - name: nginx-vod-module + github_url: https://github.com/GoHighLevel/nginx-vod-module.git + team: platform + type: service + tags: [go, platform] + + - name: nik-shivam + github_url: https://github.com/GoHighLevel/nik-shivam.git + team: platform + type: other + tags: [platform] + + - name: nodejs-logging + github_url: https://github.com/GoHighLevel/nodejs-logging.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: nodejs-logging-bunyan + github_url: https://github.com/GoHighLevel/nodejs-logging-bunyan.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: nuxt-highrise-module + github_url: https://github.com/GoHighLevel/nuxt-highrise-module.git + team: platform + type: service + tags: [typescript, nestjs, nuxt3, platform] + + - name: nuxt-highrise-ssr + github_url: https://github.com/GoHighLevel/nuxt-highrise-ssr.git + team: platform + type: service + tags: [typescript, nestjs, nuxt3, platform] + + - name: objective-builder-ui + github_url: https://github.com/GoHighLevel/objective-builder-ui.git + team: platform + type: frontend + tags: [vue, vue3, 
platform] + + - name: outscrapper-ghl + github_url: https://github.com/GoHighLevel/outscrapper-ghl.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: pdf-core-engine + github_url: https://github.com/GoHighLevel/pdf-core-engine.git + team: platform + type: library + tags: [typescript, platform] + + - name: platform-backend + github_url: https://github.com/GoHighLevel/platform-backend.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: platform-backend-demo + github_url: https://github.com/GoHighLevel/platform-backend-demo.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: platform-common-argo-apps + github_url: https://github.com/GoHighLevel/platform-common-argo-apps.git + team: platform + type: frontend + tags: [platform] + + - name: platform-common-helm-charts + github_url: https://github.com/GoHighLevel/platform-common-helm-charts.git + team: platform + type: library + tags: [go-template, platform] + + - name: platform-core + github_url: https://github.com/GoHighLevel/platform-core.git + team: platform + type: library + tags: [typescript, platform] + + - name: platform-devtools-backend + github_url: https://github.com/GoHighLevel/platform-devtools-backend.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: platform-devtools-frontend + github_url: https://github.com/GoHighLevel/platform-devtools-frontend.git + team: platform + type: frontend + tags: [typescript, platform] + + - name: platform-docs + github_url: https://github.com/GoHighLevel/platform-docs.git + team: platform + type: docs + tags: [html, platform] + + - name: platform-experiments + github_url: https://github.com/GoHighLevel/platform-experiments.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: platform-frontend-backend + github_url: https://github.com/GoHighLevel/platform-frontend-backend.git + team: platform + type: 
service + tags: [platform] + + - name: platform-frontend-docs + github_url: https://github.com/GoHighLevel/platform-frontend-docs.git + team: platform + type: frontend + tags: [platform] + + - name: platform-frontend-playground + github_url: https://github.com/GoHighLevel/platform-frontend-playground.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: platform-infra-argo-apps + github_url: https://github.com/GoHighLevel/platform-infra-argo-apps.git + team: platform + type: frontend + tags: [platform] + + - name: platform-infra-helm-charts + github_url: https://github.com/GoHighLevel/platform-infra-helm-charts.git + team: platform + type: infra + tags: [mustache, platform] + + - name: platform-jenkins-shared-library + github_url: https://github.com/GoHighLevel/platform-jenkins-shared-library.git + team: platform + type: library + tags: [groovy, platform] + + - name: platform-planning-internal + github_url: https://github.com/GoHighLevel/platform-planning-internal.git + team: platform + type: other + tags: [shell, platform] + + - name: platform-pocs + github_url: https://github.com/GoHighLevel/platform-pocs.git + team: platform + type: service + tags: [python, platform] + + - name: platform-sample-java-app + github_url: https://github.com/GoHighLevel/platform-sample-java-app.git + team: platform + type: frontend + tags: [java, platform] + + - name: platform-sample-nodejs-app + github_url: https://github.com/GoHighLevel/platform-sample-nodejs-app.git + team: platform + type: frontend + tags: [typescript, platform] + + - name: platform-shared-changes + github_url: https://github.com/GoHighLevel/platform-shared-changes.git + team: platform + type: library + tags: [go-template, platform] + + - name: platform-templates + github_url: https://github.com/GoHighLevel/platform-templates.git + team: platform + type: other + tags: [platform] + + - name: platform-terraform-gcp-infra + github_url: 
https://github.com/GoHighLevel/platform-terraform-gcp-infra.git + team: platform + type: infra + tags: [hcl, platform] + + - name: platform-terraform-gcp-modules + github_url: https://github.com/GoHighLevel/platform-terraform-gcp-modules.git + team: platform + type: infra + tags: [hcl, platform] + + - name: platform-ui + github_url: https://github.com/GoHighLevel/platform-ui.git + team: platform + type: frontend + tags: [typescript, platform] + + - name: pocketpub + github_url: https://github.com/GoHighLevel/pocketpub.git + team: platform + type: other + tags: [dart, platform] + + - name: pr-buddy + github_url: https://github.com/GoHighLevel/pr-buddy.git + team: platform + type: other + tags: [dockerfile, platform] + + - name: preference-management-frontend + github_url: https://github.com/GoHighLevel/preference-management-frontend.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: product-central + github_url: https://github.com/GoHighLevel/product-central.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: project-orion + github_url: https://github.com/GoHighLevel/project-orion.git + team: platform + type: other + tags: [html, platform] + + - name: pulse + github_url: https://github.com/GoHighLevel/pulse.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: push-docker-gcr + github_url: https://github.com/GoHighLevel/push-docker-gcr.git + team: platform + type: infra + tags: [shell, platform] + + - name: quality-gates + github_url: https://github.com/GoHighLevel/quality-gates.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: quickchart + github_url: https://github.com/GoHighLevel/quickchart.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: rca-analysis + github_url: https://github.com/GoHighLevel/rca-analysis.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - 
name: rdialr + github_url: https://github.com/GoHighLevel/rdialr.git + team: platform + type: service + tags: [go, platform] + + - name: redis-backup-cloud-function-gcp + github_url: https://github.com/GoHighLevel/redis-backup-cloud-function-gcp.git + team: platform + type: service + tags: [python, platform] + + - name: revops-mozart-transforms + github_url: https://github.com/GoHighLevel/revops-mozart-transforms.git + team: platform + type: other + tags: [platform] + + - name: revops-transcription-app + github_url: https://github.com/GoHighLevel/revops-transcription-app.git + team: platform + type: frontend + tags: [javascript, platform] + + - name: revops-transcription-app-ooh + github_url: https://github.com/GoHighLevel/revops-transcription-app-ooh.git + team: platform + type: frontend + tags: [javascript, platform] + + - name: Sandbox + github_url: https://github.com/GoHighLevel/Sandbox.git + team: platform + type: tooling + tags: [javascript, platform] + + - name: screenshot-service + github_url: https://github.com/GoHighLevel/screenshot-service.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: sdet-performance-test + github_url: https://github.com/GoHighLevel/sdet-performance-test.git + team: platform + type: tests + tags: [typescript, testing, platform] + + - name: sdet-platform + github_url: https://github.com/GoHighLevel/sdet-platform.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: sdet-platform-backend + github_url: https://github.com/GoHighLevel/sdet-platform-backend.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: sdet-platform-frontend + github_url: https://github.com/GoHighLevel/sdet-platform-frontend.git + team: platform + type: frontend + tags: [vue, vue3, platform] + + - name: sdet-platform-performance-test + github_url: https://github.com/GoHighLevel/sdet-platform-performance-test.git + team: platform + type: tests + tags: 
[typescript, testing, platform] + + - name: seed-module + github_url: https://github.com/GoHighLevel/seed-module.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: sentry + github_url: https://github.com/GoHighLevel/sentry.git + team: platform + type: other + tags: [shell, platform] + + - name: single-endpoint-get-by-id-servers + github_url: https://github.com/GoHighLevel/single-endpoint-get-by-id-servers.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: single-endpoint-servers + github_url: https://github.com/GoHighLevel/single-endpoint-servers.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: sonarcloud-test-repo-public + github_url: https://github.com/GoHighLevel/sonarcloud-test-repo-public.git + team: platform + type: tests + tags: [testing, platform] + + - name: sonarqube-jenkins-test + github_url: https://github.com/GoHighLevel/sonarqube-jenkins-test.git + team: platform + type: tests + tags: [typescript, testing, platform] + + - name: sonarqube-jenkins-test-2 + github_url: https://github.com/GoHighLevel/sonarqube-jenkins-test-2.git + team: platform + type: tests + tags: [typescript, testing, platform] + + - name: Squire + github_url: https://github.com/GoHighLevel/Squire.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: sravanth-docs + github_url: https://github.com/GoHighLevel/sravanth-docs.git + team: platform + type: docs + tags: [html, platform] + + - name: ssl-clerk + github_url: https://github.com/GoHighLevel/ssl-clerk.git + team: platform + type: service + tags: [python, platform] + + - name: supportAILabs + github_url: https://github.com/GoHighLevel/supportAILabs.git + team: platform + type: other + tags: [platform] + + - name: test-repo + github_url: https://github.com/GoHighLevel/test-repo.git + team: platform + type: tests + tags: [testing, platform] + + - name: TPRA + github_url: 
https://github.com/GoHighLevel/TPRA.git + team: platform + type: other + tags: [platform] + + - name: traffic-cop + github_url: https://github.com/GoHighLevel/traffic-cop.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: ui-ux-gap-analysis + github_url: https://github.com/GoHighLevel/ui-ux-gap-analysis.git + team: platform + type: other + tags: [platform] + + - name: update-recent-message-service + github_url: https://github.com/GoHighLevel/update-recent-message-service.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: utils + github_url: https://github.com/GoHighLevel/utils.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: vibe-builder + github_url: https://github.com/GoHighLevel/vibe-builder.git + team: platform + type: service + tags: [python, platform] + + - name: vibe-creator + github_url: https://github.com/GoHighLevel/vibe-creator.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: vibe-platform + github_url: https://github.com/GoHighLevel/vibe-platform.git + team: platform + type: service + tags: [go, platform] + + - name: video-transcoding-service + github_url: https://github.com/GoHighLevel/video-transcoding-service.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: vue-ssr-demo + github_url: https://github.com/GoHighLevel/vue-ssr-demo.git + team: platform + type: tooling + tags: [typescript, platform] + + - name: webstore-extensions + github_url: https://github.com/GoHighLevel/webstore-extensions.git + team: platform + type: other + tags: [platform] + + - name: whitelabel-customizer-frontend + github_url: https://github.com/GoHighLevel/whitelabel-customizer-frontend.git + team: platform + type: frontend + tags: [dart, platform] + + - name: wordpress-core + github_url: https://github.com/GoHighLevel/wordpress-core.git + team: platform + type: library + tags: [platform] + 
+ - name: wordpress-uptime-monitor + github_url: https://github.com/GoHighLevel/wordpress-uptime-monitor.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: Wordpress-V2-Support + github_url: https://github.com/GoHighLevel/Wordpress-V2-Support.git + team: platform + type: service + tags: [javascript, nestjs, platform] + + - name: wordpress-widget + github_url: https://github.com/GoHighLevel/wordpress-widget.git + team: platform + type: frontend + tags: [javascript, platform] + + - name: wordpress_plugins + github_url: https://github.com/GoHighLevel/wordpress_plugins.git + team: platform + type: other + tags: [php, platform] + + - name: yarn-poc + github_url: https://github.com/GoHighLevel/yarn-poc.git + team: platform + type: other + tags: [platform] + + - name: yarn-v4-nest-poc + github_url: https://github.com/GoHighLevel/yarn-v4-nest-poc.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + - name: zoom-scribe + github_url: https://github.com/GoHighLevel/zoom-scribe.git + team: platform + type: service + tags: [typescript, nestjs, platform] + + # ──────────────────── REVEX ───────────────────────── + - name: assets-drm-client + github_url: https://github.com/GoHighLevel/assets-drm-client.git + team: revex + type: library + tags: [vue, vue3, revex] + + - name: automation-am-client-portal + github_url: https://github.com/GoHighLevel/automation-am-client-portal.git + team: revex + type: frontend + tags: [vue, vue3, revex] + + - name: communities-flutter-poc + github_url: https://github.com/GoHighLevel/communities-flutter-poc.git + team: revex + type: other + tags: [dart, revex] + + - name: ghl-membership-frontend + github_url: https://github.com/GoHighLevel/ghl-membership-frontend.git + team: revex + type: frontend + tags: [typescript, revex] + + - name: ghl-revex-backend + github_url: https://github.com/GoHighLevel/ghl-revex-backend.git + team: revex + type: service + tags: [typescript, nestjs, revex] + 
+ - name: ghl-revex-clientportal-apps + github_url: https://github.com/GoHighLevel/ghl-revex-clientportal-apps.git + team: revex + type: frontend + tags: [revex] + + - name: ghl-revex-frontend + github_url: https://github.com/GoHighLevel/ghl-revex-frontend.git + team: revex + type: frontend + tags: [vue, vue3, revex] + + - name: ghl-revex-interviews + github_url: https://github.com/GoHighLevel/ghl-revex-interviews.git + team: revex + type: service + tags: [typescript, nestjs, revex] + + - name: ghl-revex-membership-frontend + github_url: https://github.com/GoHighLevel/ghl-revex-membership-frontend.git + team: revex + type: frontend + tags: [javascript, revex] + + - name: membership-backend + github_url: https://github.com/GoHighLevel/membership-backend.git + team: revex + type: service + tags: [typescript, nestjs, revex] + + - name: membership-flutter-app + github_url: https://github.com/GoHighLevel/membership-flutter-app.git + team: revex + type: frontend + tags: [dart, revex] + + - name: membership-highline + github_url: https://github.com/GoHighLevel/membership-highline.git + team: revex + type: other + tags: [dart, revex] + + - name: membership-hmi-app + github_url: https://github.com/GoHighLevel/membership-hmi-app.git + team: revex + type: frontend + tags: [vue, vue3, revex] + + - name: membership-hmi-preview + github_url: https://github.com/GoHighLevel/membership-hmi-preview.git + team: revex + type: frontend + tags: [vue, vue3, revex] + + - name: membership-ui-core + github_url: https://github.com/GoHighLevel/membership-ui-core.git + team: revex + type: frontend + tags: [typescript, revex] + + - name: revex-pyrw-dev-helper-chrome-ext + github_url: https://github.com/GoHighLevel/revex-pyrw-dev-helper-chrome-ext.git + team: revex + type: service + tags: [javascript, nestjs, revex] + + - name: revex-tests + github_url: https://github.com/GoHighLevel/revex-tests.git + team: revex + type: tests + tags: [typescript, testing, revex] + + - name: 
revex-tools-pyrw-audit-and-automation + github_url: https://github.com/GoHighLevel/revex-tools-pyrw-audit-and-automation.git + team: revex + type: tooling + tags: [javascript, revex] + + - name: revex-wordpress-internal-tools + github_url: https://github.com/GoHighLevel/revex-wordpress-internal-tools.git + team: revex + type: tooling + tags: [javascript, revex] + + - name: revex-wordpress-lc-easy-migrator + github_url: https://github.com/GoHighLevel/revex-wordpress-lc-easy-migrator.git + team: revex + type: service + tags: [javascript, nestjs, revex] + + - name: revex-wordpress-lc-easy-migrator-front-end + github_url: https://github.com/GoHighLevel/revex-wordpress-lc-easy-migrator-front-end.git + team: revex + type: frontend + tags: [vue, vue3, revex] + + - name: revex-wordpress-leadconnector-plugin + github_url: https://github.com/GoHighLevel/revex-wordpress-leadconnector-plugin.git + team: revex + type: service + tags: [javascript, nestjs, revex] + + - name: revex-wordpress-leadconnector-plugin-frontend + github_url: https://github.com/GoHighLevel/revex-wordpress-leadconnector-plugin-frontend.git + team: revex + type: frontend + tags: [vue, vue3, revex] + + - name: revex-wordpress-threatlens + github_url: https://github.com/GoHighLevel/revex-wordpress-threatlens.git + team: revex + type: service + tags: [python, revex] + + - name: RevexMobileTestAutomation + github_url: https://github.com/GoHighLevel/RevexMobileTestAutomation.git + team: revex + type: tests + tags: [javascript, testing, revex] + + # ──────────────────── CRM ─────────────────────────── + - name: appengine-local-taskqueue + github_url: https://github.com/GoHighLevel/appengine-local-taskqueue.git + team: crm + type: service + tags: [javascript, nestjs, worker, crm] + + - name: chrome-ext-crm + github_url: https://github.com/GoHighLevel/chrome-ext-crm.git + team: crm + type: service + tags: [javascript, nestjs, crm] + + - name: core-crm-tests + github_url: 
https://github.com/GoHighLevel/core-crm-tests.git + team: crm + type: tests + tags: [typescript, testing, crm] + + - name: crm-common-libs + github_url: https://github.com/GoHighLevel/crm-common-libs.git + team: crm + type: library + tags: [typescript, crm] + + - name: crm-extension-privacy-policy + github_url: https://github.com/GoHighLevel/crm-extension-privacy-policy.git + team: crm + type: other + tags: [crm] + + - name: flutter_contacts + github_url: https://github.com/GoHighLevel/flutter_contacts.git + team: crm + type: other + tags: [dart, crm] + + - name: ghl-crm-frontend + github_url: https://github.com/GoHighLevel/ghl-crm-frontend.git + team: crm + type: frontend + tags: [vue, vue3, crm] + + - name: vibe-tagger + github_url: https://github.com/GoHighLevel/vibe-tagger.git + team: crm + type: service + tags: [typescript, nestjs, crm] + + # ──────────────────── CONVERSATIONS ───────────────── + - name: ghl-chat-widget + github_url: https://github.com/GoHighLevel/ghl-chat-widget.git + team: conversations + type: frontend + tags: [vue, vue3, conversations] + + - name: ghl-email-builder + github_url: https://github.com/GoHighLevel/ghl-email-builder.git + team: conversations + type: frontend + tags: [vue, vue3, conversations] + + - name: ghl-smtp-service + github_url: https://github.com/GoHighLevel/ghl-smtp-service.git + team: conversations + type: service + tags: [javascript, nestjs, conversations] + + - name: py-chatbot + github_url: https://github.com/GoHighLevel/py-chatbot.git + team: conversations + type: service + tags: [python, conversations] + + - name: revops-chatgpt-mcp-snowflake-server + github_url: https://github.com/GoHighLevel/revops-chatgpt-mcp-snowflake-server.git + team: conversations + type: service + tags: [javascript, nestjs, mcp, conversations] + + - name: whatsapp-analytics-backup-scipts + github_url: https://github.com/GoHighLevel/whatsapp-analytics-backup-scipts.git + team: conversations + type: service + tags: [python, conversations] + + 
# ──────────────────── CALENDARS ───────────────────── + - name: abhi_collective_calendar + github_url: https://github.com/GoHighLevel/abhi_collective_calendar.git + team: calendars + type: other + tags: [calendars] + + - name: assignment_calendar + github_url: https://github.com/GoHighLevel/assignment_calendar.git + team: calendars + type: service + tags: [typescript, nestjs, calendars] + + - name: automation-calendars-deep-links + github_url: https://github.com/GoHighLevel/automation-calendars-deep-links.git + team: calendars + type: service + tags: [java, calendars] + + - name: automation-calendars-frontend + github_url: https://github.com/GoHighLevel/automation-calendars-frontend.git + team: calendars + type: frontend + tags: [vue, vue3, calendars] + + - name: automation-calendars-frontend-monorepo + github_url: https://github.com/GoHighLevel/automation-calendars-frontend-monorepo.git + team: calendars + type: frontend + tags: [vue, vue3, calendars] + + - name: automation-calendars-preview + github_url: https://github.com/GoHighLevel/automation-calendars-preview.git + team: calendars + type: frontend + tags: [typescript, calendars] + + - name: automation-calendars-reserve-backend + github_url: https://github.com/GoHighLevel/automation-calendars-reserve-backend.git + team: calendars + type: service + tags: [typescript, nestjs, calendars] + + - name: calendars-learning-go + github_url: https://github.com/GoHighLevel/calendars-learning-go.git + team: calendars + type: other + tags: [calendars] + + - name: ghl-calendars-ai-skills + github_url: https://github.com/GoHighLevel/ghl-calendars-ai-skills.git + team: calendars + type: service + tags: [go, calendars] + + - name: ghl-calendars-platform + github_url: https://github.com/GoHighLevel/ghl-calendars-platform.git + team: calendars + type: service + tags: [go, calendars] + + - name: schedulers_dart + github_url: https://github.com/GoHighLevel/schedulers_dart.git + team: calendars + type: other + tags: [dart, 
calendars] + + - name: vue-tuicalendar + github_url: https://github.com/GoHighLevel/vue-tuicalendar.git + team: calendars + type: service + tags: [javascript, nestjs, calendars] + + # ──────────────────── FUNNELS ─────────────────────── + - name: builder-preview + github_url: https://github.com/GoHighLevel/builder-preview.git + team: funnels + type: frontend + tags: [vue, vue3, funnels] + + - name: funnel-preview-cache + github_url: https://github.com/GoHighLevel/funnel-preview-cache.git + team: funnels + type: frontend + tags: [typescript, funnels] + + - name: ghl-blogging + github_url: https://github.com/GoHighLevel/ghl-blogging.git + team: funnels + type: frontend + tags: [vue, vue3, funnels] + + - name: ghl-form-ai-studio + github_url: https://github.com/GoHighLevel/ghl-form-ai-studio.git + team: funnels + type: service + tags: [typescript, nestjs, funnels] + + - name: ghl-form-element + github_url: https://github.com/GoHighLevel/ghl-form-element.git + team: funnels + type: frontend + tags: [vue, vue3, funnels] + + - name: ghl-form-embed + github_url: https://github.com/GoHighLevel/ghl-form-embed.git + team: funnels + type: service + tags: [typescript, nestjs, funnels] + + - name: ghl-form-survey + github_url: https://github.com/GoHighLevel/ghl-form-survey.git + team: funnels + type: frontend + tags: [vue, vue3, funnels] + + - name: ghl-funnel-website + github_url: https://github.com/GoHighLevel/ghl-funnel-website.git + team: funnels + type: frontend + tags: [vue, vue3, funnels] + + - name: leadgen-funnels-backend + github_url: https://github.com/GoHighLevel/leadgen-funnels-backend.git + team: funnels + type: service + tags: [funnels] + + - name: page-builder + github_url: https://github.com/GoHighLevel/page-builder.git + team: funnels + type: frontend + tags: [vue, vue3, funnels] + + - name: spm-appengine + github_url: https://github.com/GoHighLevel/spm-appengine.git + team: funnels + type: frontend + tags: [typescript, funnels] + + - name: spm-proxy-server + 
github_url: https://github.com/GoHighLevel/spm-proxy-server.git + team: funnels + type: service + tags: [javascript, nestjs, funnels] + + - name: spm-ts + github_url: https://github.com/GoHighLevel/spm-ts.git + team: funnels + type: frontend + tags: [vue, vue3, funnels] + + # ──────────────────── PAYMENTS ────────────────────── + - name: affiliate-signup-page + github_url: https://github.com/GoHighLevel/affiliate-signup-page.git + team: payments + type: frontend + tags: [vue, vue3, payments] + + - name: authorize-net-playground + github_url: https://github.com/GoHighLevel/authorize-net-playground.git + team: payments + type: tooling + tags: [typescript, payments] + + - name: dev-commerce-subscriptionsx + github_url: https://github.com/GoHighLevel/dev-commerce-subscriptionsx.git + team: payments + type: other + tags: [payments] + + - name: ghl-invoice-preview + github_url: https://github.com/GoHighLevel/ghl-invoice-preview.git + team: payments + type: frontend + tags: [vue, vue3, payments] + + - name: ghl-leadgen-payments + github_url: https://github.com/GoHighLevel/ghl-leadgen-payments.git + team: payments + type: frontend + tags: [vue, vue3, payments] + + - name: ghl-payment-element + github_url: https://github.com/GoHighLevel/ghl-payment-element.git + team: payments + type: frontend + tags: [vue, vue3, payments] + + - name: ghl-payments-flutter + github_url: https://github.com/GoHighLevel/ghl-payments-flutter.git + team: payments + type: other + tags: [swift, payments] + + - name: leadgen-payment-products-backend + github_url: https://github.com/GoHighLevel/leadgen-payment-products-backend.git + team: payments + type: service + tags: [payments] + + - name: mobile-square-in-app-payments + github_url: https://github.com/GoHighLevel/mobile-square-in-app-payments.git + team: payments + type: frontend + tags: [payments] + + - name: module-stripe + github_url: https://github.com/GoHighLevel/module-stripe.git + team: payments + type: service + tags: [typescript, nestjs, 
payments] + + - name: payment-products-preview + github_url: https://github.com/GoHighLevel/payment-products-preview.git + team: payments + type: frontend + tags: [vue, vue3, payments] + + - name: payment-service + github_url: https://github.com/GoHighLevel/payment-service.git + team: payments + type: service + tags: [typescript, nestjs, payments] + + # ──────────────────── MARKETING ───────────────────── + - name: automation-am-external-script + github_url: https://github.com/GoHighLevel/automation-am-external-script.git + team: marketing + type: tooling + tags: [typescript, marketing] + + - name: automation-am-frontend + github_url: https://github.com/GoHighLevel/automation-am-frontend.git + team: marketing + type: frontend + tags: [vue, vue3, marketing] + + - name: automation-am-reward-fronted + github_url: https://github.com/GoHighLevel/automation-am-reward-fronted.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: automation-apps-backend + github_url: https://github.com/GoHighLevel/automation-apps-backend.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: automation-data-bi-platform + github_url: https://github.com/GoHighLevel/automation-data-bi-platform.git + team: marketing + type: service + tags: [python, marketing] + + - name: automation-eliza-backend + github_url: https://github.com/GoHighLevel/automation-eliza-backend.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: automation-eliza-frontend + github_url: https://github.com/GoHighLevel/automation-eliza-frontend.git + team: marketing + type: frontend + tags: [vue, vue3, marketing] + + - name: automation-migration + github_url: https://github.com/GoHighLevel/automation-migration.git + team: marketing + type: tooling + tags: [typescript, marketing] + + - name: automation-next-apps-backend + github_url: https://github.com/GoHighLevel/automation-next-apps-backend.git + team: marketing + type: 
service + tags: [go, marketing] + + - name: automation-sync-engine + github_url: https://github.com/GoHighLevel/automation-sync-engine.git + team: marketing + type: other + tags: [marketing] + + - name: automation-workflows-ai + github_url: https://github.com/GoHighLevel/automation-workflows-ai.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: automation-workflows-ai-pilot + github_url: https://github.com/GoHighLevel/automation-workflows-ai-pilot.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: automation-workflows-backend + github_url: https://github.com/GoHighLevel/automation-workflows-backend.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: automation-workflows-frontend + github_url: https://github.com/GoHighLevel/automation-workflows-frontend.git + team: marketing + type: frontend + tags: [typescript, marketing] + + - name: automation-workflows-iatf-ai-agent + github_url: https://github.com/GoHighLevel/automation-workflows-iatf-ai-agent.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: automation-workflows-iatf-frontend + github_url: https://github.com/GoHighLevel/automation-workflows-iatf-frontend.git + team: marketing + type: frontend + tags: [vue, vue3, marketing] + + - name: automation-workflows-ui-mcp + github_url: https://github.com/GoHighLevel/automation-workflows-ui-mcp.git + team: marketing + type: frontend + tags: [typescript, mcp, marketing] + + - name: automation-workflows-validators + github_url: https://github.com/GoHighLevel/automation-workflows-validators.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: Calender_Automation_Assignment_Daksh + github_url: https://github.com/GoHighLevel/Calender_Automation_Assignment_Daksh.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: clickup-automation + github_url: 
https://github.com/GoHighLevel/clickup-automation.git + team: marketing + type: other + tags: [marketing] + + - name: doc-preview + github_url: https://github.com/GoHighLevel/doc-preview.git + team: marketing + type: frontend + tags: [vue, vue3, marketing] + + - name: domain-reputation + github_url: https://github.com/GoHighLevel/domain-reputation.git + team: marketing + type: service + tags: [python, marketing] + + - name: email-preview + github_url: https://github.com/GoHighLevel/email-preview.git + team: marketing + type: frontend + tags: [vue, vue3, marketing] + + - name: ghl-mobileAutomation + github_url: https://github.com/GoHighLevel/ghl-mobileAutomation.git + team: marketing + type: service + tags: [java, marketing] + + - name: ghl-social-media-external + github_url: https://github.com/GoHighLevel/ghl-social-media-external.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: ghl-social-media-posting + github_url: https://github.com/GoHighLevel/ghl-social-media-posting.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: Gokollab-Native-Automation + github_url: https://github.com/GoHighLevel/Gokollab-Native-Automation.git + team: marketing + type: service + tags: [javascript, nestjs, marketing] + + - name: hiring-live-ai-workflows + github_url: https://github.com/GoHighLevel/hiring-live-ai-workflows.git + team: marketing + type: other + tags: [marketing] + + - name: hl-automation-project-template + github_url: https://github.com/GoHighLevel/hl-automation-project-template.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: leadgen-store-preview + github_url: https://github.com/GoHighLevel/leadgen-store-preview.git + team: marketing + type: frontend + tags: [marketing] + + - name: marketplace-app-review-agents + github_url: https://github.com/GoHighLevel/marketplace-app-review-agents.git + team: marketing + type: frontend + tags: [javascript, 
marketing] + + - name: private-github-workflows + github_url: https://github.com/GoHighLevel/private-github-workflows.git + team: marketing + type: service + tags: [javascript, nestjs, marketing] + + - name: revops-automation + github_url: https://github.com/GoHighLevel/revops-automation.git + team: marketing + type: service + tags: [python, marketing] + + - name: WhiteLabel_Automation + github_url: https://github.com/GoHighLevel/WhiteLabel_Automation.git + team: marketing + type: other + tags: [shell, marketing] + + - name: workflow-importers-IR-model + github_url: https://github.com/GoHighLevel/workflow-importers-IR-model.git + team: marketing + type: service + tags: [typescript, nestjs, marketing] + + - name: workflow-mcp-server + github_url: https://github.com/GoHighLevel/workflow-mcp-server.git + team: marketing + type: service + tags: [javascript, nestjs, mcp, marketing] + + # ──────────────────── PHONE ───────────────────────── + - name: flutter_libphonenumber + github_url: https://github.com/GoHighLevel/flutter_libphonenumber.git + team: phone + type: other + tags: [dart, phone] + + - name: twilio_voice_federated + github_url: https://github.com/GoHighLevel/twilio_voice_federated.git + team: phone + type: service + tags: [kotlin, phone] + + - name: voice-ai-mindcast + github_url: https://github.com/GoHighLevel/voice-ai-mindcast.git + team: phone + type: service + tags: [go, phone] + + # ──────────────────── REPORTING ───────────────────── + - name: data-dbt-analytics + github_url: https://github.com/GoHighLevel/data-dbt-analytics.git + team: reporting + type: other + tags: [reporting] + + - name: data-dbt-data-foundation + github_url: https://github.com/GoHighLevel/data-dbt-data-foundation.git + team: reporting + type: other + tags: [jupyter-notebook, reporting] + + - name: data-dbt-starburst + github_url: https://github.com/GoHighLevel/data-dbt-starburst.git + team: reporting + type: other + tags: [reporting] + + - name: data-platform-core + github_url: 
https://github.com/GoHighLevel/data-platform-core.git + team: reporting + type: library + tags: [java, reporting] + + - name: ghl-attribution-external-script + github_url: https://github.com/GoHighLevel/ghl-attribution-external-script.git + team: reporting + type: tooling + tags: [typescript, reporting] + + - name: leadgen-reporting-ads-backend + github_url: https://github.com/GoHighLevel/leadgen-reporting-ads-backend.git + team: reporting + type: service + tags: [python, reporting] + + - name: leadgen-reporting-ai + github_url: https://github.com/GoHighLevel/leadgen-reporting-ai.git + team: reporting + type: other + tags: [reporting] + + - name: leadgen-reporting-attribution-backend + github_url: https://github.com/GoHighLevel/leadgen-reporting-attribution-backend.git + team: reporting + type: service + tags: [typescript, nestjs, reporting] + + - name: leadgen-reporting-frontend + github_url: https://github.com/GoHighLevel/leadgen-reporting-frontend.git + team: reporting + type: frontend + tags: [vue, vue3, reporting] + + - name: leadgen-reporting-messages-backend + github_url: https://github.com/GoHighLevel/leadgen-reporting-messages-backend.git + team: reporting + type: service + tags: [typescript, nestjs, reporting] + + - name: marketplace-reporting-scripts + github_url: https://github.com/GoHighLevel/marketplace-reporting-scripts.git + team: reporting + type: tooling + tags: [javascript, reporting] + + # ──────────────────── SAAS ────────────────────────── + - name: AgencyUX + github_url: https://github.com/GoHighLevel/AgencyUX.git + team: saas + type: frontend + tags: [vue, vue3, saas] + + - name: ai-marketplace-tests + github_url: https://github.com/GoHighLevel/ai-marketplace-tests.git + team: saas + type: tests + tags: [typescript, testing, saas] + + - name: ghl-marketplace-app-template + github_url: https://github.com/GoHighLevel/ghl-marketplace-app-template.git + team: saas + type: frontend + tags: [typescript, saas] + + - name: 
leadgen-marketplace-backend + github_url: https://github.com/GoHighLevel/leadgen-marketplace-backend.git + team: saas + type: service + tags: [typescript, nestjs, saas] + + - name: marketplace-backend + github_url: https://github.com/GoHighLevel/marketplace-backend.git + team: saas + type: service + tags: [typescript, nestjs, saas] + + - name: marketplace-backend-demo + github_url: https://github.com/GoHighLevel/marketplace-backend-demo.git + team: saas + type: service + tags: [typescript, nestjs, saas] + + - name: marketplace-frontend + github_url: https://github.com/GoHighLevel/marketplace-frontend.git + team: saas + type: frontend + tags: [vue, vue3, saas] + + - name: saas-service + github_url: https://github.com/GoHighLevel/saas-service.git + team: saas + type: service + tags: [typescript, nestjs, saas] + + # ──────────────────── INTEGRATIONS ────────────────── + - name: highlevel-zapier + github_url: https://github.com/GoHighLevel/highlevel-zapier.git + team: integrations + type: service + tags: [javascript, nestjs, integrations] + + - name: hr-integration + github_url: https://github.com/GoHighLevel/hr-integration.git + team: integrations + type: frontend + tags: [vue, vue3, integrations] + + - name: integration-core + github_url: https://github.com/GoHighLevel/integration-core.git + team: integrations + type: library + tags: [dockerfile, integrations] + + - name: leadconnector + github_url: https://github.com/GoHighLevel/leadconnector.git + team: integrations + type: service + tags: [typescript, nestjs, integrations] + + - name: leadconnector-plugin-wordpress + github_url: https://github.com/GoHighLevel/leadconnector-plugin-wordpress.git + team: integrations + type: other + tags: [php, integrations] + + - name: oauth-demo + github_url: https://github.com/GoHighLevel/oauth-demo.git + team: integrations + type: tooling + tags: [javascript, integrations] + + # ──────────────────── AI ──────────────────────────── + - name: ai-backend + github_url: 
https://github.com/GoHighLevel/ai-backend.git + team: ai + type: service + tags: [typescript, nestjs, ai] + + - name: ai-employees-evals + github_url: https://github.com/GoHighLevel/ai-employees-evals.git + team: ai + type: tests + tags: [javascript, testing, ai] + + - name: ai-frontend + github_url: https://github.com/GoHighLevel/ai-frontend.git + team: ai + type: frontend + tags: [vue, vue3, ai] + + - name: ai-partners-frontend + github_url: https://github.com/GoHighLevel/ai-partners-frontend.git + team: ai + type: frontend + tags: [ai] + + - name: ai-supervisor-prototype + github_url: https://github.com/GoHighLevel/ai-supervisor-prototype.git + team: ai + type: tooling + tags: [vue, vue3, ai] + + - name: evaluations-ai-frontend + github_url: https://github.com/GoHighLevel/evaluations-ai-frontend.git + team: ai + type: frontend + tags: [vue, vue3, testing, ai] + + - name: ghl-ai-skills + github_url: https://github.com/GoHighLevel/ghl-ai-skills.git + team: ai + type: other + tags: [shell, ai] + + - name: ghl-ai-test-generator + github_url: https://github.com/GoHighLevel/ghl-ai-test-generator.git + team: ai + type: tests + tags: [javascript, testing, ai] + + - name: ghl-aip + github_url: https://github.com/GoHighLevel/ghl-aip.git + team: ai + type: other + tags: [ai] + + - name: ghl-content-ai + github_url: https://github.com/GoHighLevel/ghl-content-ai.git + team: ai + type: frontend + tags: [vue, vue3, ai] + + - name: ghl-rag-framework + github_url: https://github.com/GoHighLevel/ghl-rag-framework.git + team: ai + type: library + tags: [javascript, ai] + + - name: highlevel-employee-portal + github_url: https://github.com/GoHighLevel/highlevel-employee-portal.git + team: ai + type: frontend + tags: [vue, vue3, ai] + + - name: onboarding-fuzzy-inference + github_url: https://github.com/GoHighLevel/onboarding-fuzzy-inference.git + team: ai + type: service + tags: [typescript, nestjs, ai] + + - name: onboarding-fuzzy-inference-system + github_url: 
https://github.com/GoHighLevel/onboarding-fuzzy-inference-system.git + team: ai + type: other + tags: [ai] + + - name: platform-ai + github_url: https://github.com/GoHighLevel/platform-ai.git + team: ai + type: service + tags: [python, ai] + + - name: vertical-ai + github_url: https://github.com/GoHighLevel/vertical-ai.git + team: ai + type: service + tags: [typescript, nestjs, ai] + + - name: visibility-ai + github_url: https://github.com/GoHighLevel/visibility-ai.git + team: ai + type: other + tags: [ai] + + - name: zai-demo + github_url: https://github.com/GoHighLevel/zai-demo.git + team: ai + type: tooling + tags: [ai] diff --git a/cloudbuild.ghl.yaml b/cloudbuild.ghl.yaml new file mode 100644 index 00000000..c0666a00 --- /dev/null +++ b/cloudbuild.ghl.yaml @@ -0,0 +1,17 @@ +steps: + - name: 'gcr.io/cloud-builders/docker' + args: + - build + - -f + - Dockerfile.ghl + - -t + - gcr.io/$PROJECT_ID/codebase-memory-mcp-ghl:latest + - . + timeout: 1200s + +images: + - gcr.io/$PROJECT_ID/codebase-memory-mcp-ghl:latest + +options: + machineType: E2_HIGHCPU_32 + logging: CLOUD_LOGGING_ONLY diff --git a/deployments/ghl/helm/Chart.yaml b/deployments/ghl/helm/Chart.yaml new file mode 100644 index 00000000..7f7d1f63 --- /dev/null +++ b/deployments/ghl/helm/Chart.yaml @@ -0,0 +1,17 @@ +apiVersion: v2 +name: codebase-memory-mcp +description: GHL fleet server for codebase-memory-mcp — indexes all 200 GHL repos and exposes them via an HTTP MCP endpoint +type: application +version: 0.1.0 +appVersion: "1.0.0" +keywords: + - mcp + - code-intelligence + - ai + - ghl +home: https://github.com/GoHighLevel/codebase-memory-mcp +sources: + - https://github.com/GoHighLevel/codebase-memory-mcp +maintainers: + - name: platform-infra + email: platform@gohighlevel.com diff --git a/deployments/ghl/helm/templates/_helpers.tpl b/deployments/ghl/helm/templates/_helpers.tpl new file mode 100644 index 00000000..84da1556 --- /dev/null +++ b/deployments/ghl/helm/templates/_helpers.tpl @@ -0,0 +1,67 
@@ +{{/* +Expand the name of the chart. +*/}} +{{- define "codebase-memory-mcp.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +*/}} +{{- define "codebase-memory-mcp.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart label. +*/}} +{{- define "codebase-memory-mcp.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels. +*/}} +{{- define "codebase-memory-mcp.labels" -}} +helm.sh/chart: {{ include "codebase-memory-mcp.chart" . }} +{{ include "codebase-memory-mcp.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels. +*/}} +{{- define "codebase-memory-mcp.selectorLabels" -}} +app.kubernetes.io/name: {{ include "codebase-memory-mcp.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +ServiceAccount name. +*/}} +{{- define "codebase-memory-mcp.serviceAccountName" -}} +{{- if .Values.serviceAccount.create }} +{{- default (include "codebase-memory-mcp.fullname" .) .Values.serviceAccount.name }} +{{- else }} +{{- default "default" .Values.serviceAccount.name }} +{{- end }} +{{- end }} + +{{/* +Image tag (defaults to appVersion). 
+*/}} +{{- define "codebase-memory-mcp.imageTag" -}} +{{- .Values.image.tag | default .Chart.AppVersion }} +{{- end }} diff --git a/deployments/ghl/helm/templates/configmap.yaml b/deployments/ghl/helm/templates/configmap.yaml new file mode 100644 index 00000000..7319744a --- /dev/null +++ b/deployments/ghl/helm/templates/configmap.yaml @@ -0,0 +1,14 @@ +{{- if .Values.reposManifest.configMap.enabled -}} +# Optional: override REPOS.yaml from a ConfigMap instead of baking it into the image. +# Set reposManifest.configMap.enabled=true and supply the full REPOS.yaml content +# in a values override or via --set-file. +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ .Values.reposManifest.configMap.name | default (printf "%s-repos" (include "codebase-memory-mcp.fullname" .)) }} + labels: + {{- include "codebase-memory-mcp.labels" . | nindent 4 }} +data: + REPOS.yaml: | + # Populated at deploy time via --set-file or Helm values +{{- end }} diff --git a/deployments/ghl/helm/templates/deployment.yaml b/deployments/ghl/helm/templates/deployment.yaml new file mode 100644 index 00000000..1aaec306 --- /dev/null +++ b/deployments/ghl/helm/templates/deployment.yaml @@ -0,0 +1,120 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "codebase-memory-mcp.fullname" . }} + labels: + {{- include "codebase-memory-mcp.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + # StatefulSet-like: only 1 replica writing to the PVC; Recreate avoids two pods fighting over the volume + strategy: + type: Recreate + selector: + matchLabels: + {{- include "codebase-memory-mcp.selectorLabels" . | nindent 6 }} + template: + metadata: + annotations: + {{- with .Values.podAnnotations }} + {{- toYaml . | nindent 8 }} + {{- end }} + # Restart pods when the ConfigMap changes + checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} + labels: + {{- include "codebase-memory-mcp.selectorLabels" . 
| nindent 8 }} + spec: + {{- with .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + serviceAccountName: {{ include "codebase-memory-mcp.serviceAccountName" . }} + securityContext: + {{- toYaml .Values.podSecurityContext | nindent 8 }} + containers: + - name: fleet + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: "{{ .Values.image.repository }}:{{ include "codebase-memory-mcp.imageTag" . }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + ports: + - name: http + containerPort: 8080 + protocol: TCP + env: + {{- range $key, $value := .Values.env }} + - name: {{ $key }} + value: {{ $value | quote }} + {{- end }} + # Secrets from GCP Secret Manager + - name: BEARER_TOKEN + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.bearerToken.secretName }} + key: {{ .Values.secrets.bearerToken.key }} + optional: true + - name: GITHUB_WEBHOOK_SECRET + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.webhookSecret.secretName }} + key: {{ .Values.secrets.webhookSecret.key }} + optional: true + - name: GITHUB_TOKEN + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.githubToken.secretName }} + key: {{ .Values.secrets.githubToken.key }} + optional: true + {{- if .Values.reposManifest.configMap.enabled }} + - name: REPOS_MANIFEST + value: /config/REPOS.yaml + {{- end }} + volumeMounts: + - name: fleet-cache + mountPath: {{ .Values.persistence.mountPath }} + {{- if .Values.reposManifest.configMap.enabled }} + - name: repos-manifest + mountPath: /config + readOnly: true + {{- end }} + {{- if .Values.githubDeployKey.enabled }} + - name: github-deploy-key + mountPath: /root/.ssh + readOnly: true + {{- end }} + livenessProbe: + {{- toYaml .Values.livenessProbe | nindent 12 }} + readinessProbe: + {{- toYaml .Values.readinessProbe | nindent 12 }} + resources: + {{- toYaml .Values.resources | nindent 12 }} + volumes: + - name: fleet-cache + {{- if .Values.persistence.enabled }} + persistentVolumeClaim: 
+ claimName: {{ include "codebase-memory-mcp.fullname" . }}-cache + {{- else }} + emptyDir: {} + {{- end }} + {{- if .Values.reposManifest.configMap.enabled }} + - name: repos-manifest + configMap: + name: {{ .Values.reposManifest.configMap.name | default (printf "%s-repos" (include "codebase-memory-mcp.fullname" .)) }} + {{- end }} + {{- if .Values.githubDeployKey.enabled }} + - name: github-deploy-key + secret: + secretName: {{ .Values.githubDeployKey.secretName }} + defaultMode: 0400 + {{- end }} + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} diff --git a/deployments/ghl/helm/templates/pvc.yaml b/deployments/ghl/helm/templates/pvc.yaml new file mode 100644 index 00000000..03bee522 --- /dev/null +++ b/deployments/ghl/helm/templates/pvc.yaml @@ -0,0 +1,20 @@ +{{- if .Values.persistence.enabled -}} +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ include "codebase-memory-mcp.fullname" . }}-cache + labels: + {{- include "codebase-memory-mcp.labels" . | nindent 4 }} + annotations: + # Retain the PVC even if the Helm release is deleted — the index is expensive to rebuild + helm.sh/resource-policy: keep +spec: + accessModes: + - {{ .Values.persistence.accessMode }} + {{- if .Values.persistence.storageClass }} + storageClassName: {{ .Values.persistence.storageClass }} + {{- end }} + resources: + requests: + storage: {{ .Values.persistence.size }} +{{- end }} diff --git a/deployments/ghl/helm/templates/service.yaml b/deployments/ghl/helm/templates/service.yaml new file mode 100644 index 00000000..54e7af33 --- /dev/null +++ b/deployments/ghl/helm/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "codebase-memory-mcp.fullname" . 
}} + labels: + {{- include "codebase-memory-mcp.labels" . | nindent 4 }} +spec: + type: {{ .Values.service.type }} + ports: + - port: {{ .Values.service.port }} + targetPort: http + protocol: TCP + name: http + selector: + {{- include "codebase-memory-mcp.selectorLabels" . | nindent 4 }} diff --git a/deployments/ghl/helm/templates/serviceaccount.yaml b/deployments/ghl/helm/templates/serviceaccount.yaml new file mode 100644 index 00000000..868983a2 --- /dev/null +++ b/deployments/ghl/helm/templates/serviceaccount.yaml @@ -0,0 +1,12 @@ +{{- if .Values.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "codebase-memory-mcp.serviceAccountName" . }} + labels: + {{- include "codebase-memory-mcp.labels" . | nindent 4 }} + {{- with .Values.serviceAccount.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +{{- end }} diff --git a/deployments/ghl/helm/templates/virtualservice.yaml b/deployments/ghl/helm/templates/virtualservice.yaml new file mode 100644 index 00000000..3ebc6015 --- /dev/null +++ b/deployments/ghl/helm/templates/virtualservice.yaml @@ -0,0 +1,29 @@ +{{- if .Values.virtualService.enabled -}} +apiVersion: networking.istio.io/v1beta1 +kind: VirtualService +metadata: + name: {{ include "codebase-memory-mcp.fullname" . }} + labels: + {{- include "codebase-memory-mcp.labels" . | nindent 4 }} +spec: + hosts: + - {{ .Values.virtualService.host }} + {{- if .Values.virtualService.gateway }} + gateways: + - {{ .Values.virtualService.gateway }} + {{- end }} + http: + - match: + - uri: + prefix: / + route: + - destination: + host: {{ include "codebase-memory-mcp.fullname" . 
}} + port: + number: {{ .Values.service.port }} + timeout: 300s # fleet indexing can take a while + retries: + attempts: 3 + perTryTimeout: 10s + retryOn: connect-failure,refused-stream,unavailable,retriable-4xx +{{- end }} diff --git a/deployments/ghl/helm/values-staging.yaml b/deployments/ghl/helm/values-staging.yaml new file mode 100644 index 00000000..3e7aec4f --- /dev/null +++ b/deployments/ghl/helm/values-staging.yaml @@ -0,0 +1,12 @@ +# values-staging.yaml — staging overrides +image: + tag: "latest" + +env: + FLEET_CONCURRENCY: "8" + INDEXER_CLIENTS: "8" + GITHUB_AUTH_ENABLED: "true" + GITHUB_ALLOWED_ORGS: "GoHighLevel" + +persistence: + size: "20Gi" diff --git a/deployments/ghl/helm/values.yaml b/deployments/ghl/helm/values.yaml new file mode 100644 index 00000000..893f6077 --- /dev/null +++ b/deployments/ghl/helm/values.yaml @@ -0,0 +1,121 @@ +# values.yaml — codebase-memory-mcp GHL fleet +# Override these in values-staging.yaml / values-production.yaml + +replicaCount: 1 + +image: + repository: gcr.io/highlevel-common-layer/codebase-memory-mcp-ghl + pullPolicy: IfNotPresent + tag: "" # defaults to .Chart.AppVersion + +imagePullSecrets: [] +nameOverride: "" +fullnameOverride: "" + +serviceAccount: + create: true + annotations: {} + name: "" + +podAnnotations: {} + +podSecurityContext: + fsGroup: 65532 # nonroot + +securityContext: + allowPrivilegeEscalation: false + readOnlyRootFilesystem: false # SQLite writes to /data + runAsNonRoot: true + runAsUser: 65532 + capabilities: + drop: + - ALL + +service: + type: ClusterIP + port: 8080 + +# Expose via Istio VirtualService (GHL standard) +virtualService: + enabled: true + host: "codebase-memory-mcp.internal.svc.cluster.local" + gateway: "" # uses mesh by default + +ingress: + enabled: false + +resources: + limits: + cpu: "2" + memory: "4Gi" + requests: + cpu: "500m" + memory: "1Gi" + +autoscaling: + enabled: false # fleet server is stateful (PVC); don't autoscale by default + +# Persistent volume for SQLite 
fleet cache (~200 repos) +persistence: + enabled: true + storageClass: "standard-rwo" + size: "50Gi" + accessMode: ReadWriteOnce + mountPath: /data/fleet-cache + +# Environment — secrets injected from GCP Secret Manager via GHL secret-manager pattern +env: + PORT: "8080" + FLEET_CONCURRENCY: "8" + INDEXER_CLIENTS: "8" + CRON_INCREMENTAL: "0 */6 * * *" + CRON_FULL: "0 2 * * 0" + CBM_CACHE_DIR: "/tmp/codebase-memory-mcp" + FLEET_CACHE_DIR: "/data/fleet-cache" + REPOS_MANIFEST: "/app/REPOS.local.yaml" + +# Secrets — reference GCP Secret Manager secrets +# These are injected as env vars at runtime +secrets: + bearerToken: + secretName: "codebase-memory-mcp-bearer-token" + key: "token" + webhookSecret: + secretName: "codebase-memory-mcp-webhook-secret" + key: "secret" + githubToken: + secretName: "codebase-memory-mcp-github-token" + key: "token" + +# Optional: override REPOS.yaml via ConfigMap instead of baked image +reposManifest: + configMap: + enabled: false + name: "" + +livenessProbe: + httpGet: + path: /health + port: 8080 + initialDelaySeconds: 30 + periodSeconds: 30 + timeoutSeconds: 5 + failureThreshold: 3 + +readinessProbe: + httpGet: + path: /health + port: 8080 + initialDelaySeconds: 10 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + +nodeSelector: {} +tolerations: [] +affinity: {} + +# GitHub deploy key for private repo cloning +githubDeployKey: + enabled: false + secretName: "github-deploy-key" # SSH private key diff --git a/docs/CBM_VS_PROJECT_ORION_COMPARISON.md b/docs/CBM_VS_PROJECT_ORION_COMPARISON.md new file mode 100644 index 00000000..26c871f1 --- /dev/null +++ b/docs/CBM_VS_PROJECT_ORION_COMPARISON.md @@ -0,0 +1,325 @@ +# Codebase Memory MCP vs Project Orion + +_Prepared on April 15, 2026_ + +## Executive Summary + +This is an end-to-end implementation comparison between: + +- **Codebase Memory MCP (CBM)**: the indexing and graph-analysis engine in this repository +- **Project Orion**: the Python-based multi-repo retrieval, MCP, and 
LLM analysis service in `~/Documents/highlevel/project-orion` + +These systems solve related problems, but they are **not equivalent architectures**. + +- **CBM is stronger as a code intelligence engine.** + It has the better indexing core, richer graph model, native impact-analysis surface, stronger storage discipline, and much broader test coverage. +- **Project Orion is stronger as a developer-facing MCP application.** + It has the cleaner native HTTP MCP serving layer, easier local-workspace onboarding, and a more explicit retrieval-plus-LLM answer flow. +- **Neither deployment is truly multi-pod ready today.** + Both are currently implemented and configured as effectively single-writer systems. + +The correct non-biased conclusion is: + +- If the goal is **deep structural code intelligence at scale**, CBM is the stronger foundation. +- If the goal is **fast local developer enablement and a simple MCP-hosted UX**, Orion is ahead on the serving/control-plane side. + +--- + +## What Each System Really Is + +| System | What it fundamentally is | Primary implementation style | Core value | +|---|---|---|---| +| **CBM** | A graph-native code indexing engine with an MCP tool surface | C engine + Go fleet wrapper + HTTP bridge | Deep code structure, tracing, impact analysis, semantic relationships | +| **Project Orion** | A multi-repo code retrieval and LLM-analysis service with MCP + REST | Python FastAPI + FastMCP + ChromaDB/BM25 | Developer-friendly repo discovery, search, summarization, and answer generation | + +### CBM key implementation anchors + +- Fleet/server wrapper: `ghl/cmd/server/main.go` +- MCP subprocess client: `ghl/internal/mcp/client.go` +- Fleet indexing orchestration: `ghl/internal/indexer/indexer.go` +- HTTP bridge: `ghl/internal/bridge/bridge.go` +- Core indexing pipeline: `src/pipeline/pipeline.c` +- Parallel extraction pipeline: `src/pipeline/pass_parallel.c` +- MCP tool definitions and store resolution: `src/mcp/mcp.c` +- SQLite tuning and 
dump safety: `src/store/store.c` + +### Project Orion key implementation anchors + +- FastMCP server: `orion/mcp_server.py` +- FastAPI app: `orion/api/main.py` +- Workspace services: `orion/app_services.py` +- Retrieval pipeline: `orion/search/retriever.py` +- Context expansion: `orion/search/context_expander.py` +- LLM analysis engine: `orion/engine/query_engine.py` +- Index storage pipeline: `orion/indexer/store.py` +- Parser/scanner/embedder: `orion/indexer/parser.py`, `orion/indexer/scanner.py`, `orion/indexer/embedder.py` + +--- + +## End-to-End Architecture Comparison + +| Dimension | Codebase Memory MCP | Project Orion | What is better right now | +|---|---|---|---| +| **Core architecture** | Multi-pass graph indexing engine with project DBs | Retrieval-oriented local repo indexing service | **CBM** | +| **Primary data model** | Nodes, edges, graph schema, semantic edges, structural relationships | Chunk embeddings + BM25 + lightweight import/call graph | **CBM** | +| **Serving model** | HTTP bridge over a single stdio MCP subprocess | Native FastMCP over Streamable HTTP | **Orion** | +| **Repo onboarding** | Manifest-driven fleet indexing, webhooks, manual re-index endpoints | Local path indexing and Git repo discovery | **Orion** for local dev | +| **Index persistence** | Per-project SQLite DB files with query-only reopen and integrity checks | ChromaDB local persistence + pickle BM25 + JSON graph/meta | **CBM** | +| **Natural-language answer flow** | Tool-driven; analysis comes from graph tools and downstream client behavior | Explicit hybrid search -> rerank -> expand -> LLM answer pipeline | **Orion** | +| **Impact analysis surface** | Native via graph tools like `trace_path`, `detect_changes`, `query_graph` | Indirect via retrieved chunks + LLM synthesis | **CBM** | +| **Durability discipline** | WAL, integrity checks, atomic dump flow, explicit query-only open | Local files, limited safety model, simpler but weaker persistence story | **CBM** | +| 
**Operational simplicity** | More moving parts | Simpler runtime shape | **Orion** | +| **Scaling readiness** | Strong engine, weaker orchestration layer | Simpler service, weaker indexing/storage model | **Split** | + +--- + +## Indexing Pipeline: One-to-One Comparison + +### High-level flow + +| Step | Codebase Memory MCP | Project Orion | Better implementation | +|---|---|---|---| +| 1. Repo input | Clone/update repo from manifest into cache dir | Discover local Git repos or accept explicit repo path | Depends on use case | +| 2. File discovery | Structured discover pass in C pipeline | `scan_repo()` walks repo and filters files | **CBM** | +| 3. Parse/extract | Parallel extract/resolve workers | Sequential parser loop per file batch | **CBM** | +| 4. Intermediate model | In-memory graph buffer + registry | Batch chunk list + BM25 record list + graph record list | **CBM** | +| 5. Semantic layer | Native semantic edge generation and graph enrichment | Vector search index built from chunks; no graph-native semantic edge layer | **CBM** | +| 6. Storage output | Single project SQLite DB with graph + indexes | Chroma collection + BM25 pickle + graph JSON + meta JSON | **CBM** | +| 7. 
Re-index behavior | Supports incremental mode in engine | Deletes collection and rebuilds from scratch | **CBM** | + +### Why CBM's indexer is technically stronger + +| Capability | CBM | Orion | Gap | +|---|---|---|---| +| Parallel parse/extract | Yes | No | Major CBM advantage | +| Incremental indexing | Yes | No | Major CBM advantage | +| Rich structural graph | Yes | Partial | Major CBM advantage | +| Single-source storage artifact | Mostly yes, per project DB | No, split across multiple file types | CBM advantage | +| Built-in semantic graph layer | Yes | No, relies on retrieval embeddings instead | CBM advantage | +| Query-time graph-native impact tracing | Yes | No | CBM advantage | + +### Why Orion still feels good for some workflows + +| Capability | CBM | Orion | Gap | +|---|---|---|---| +| Index arbitrary local repo path quickly | Not the primary UX | Yes | Orion advantage | +| Discover repos in a workspace automatically | Not the primary UX | Yes | Orion advantage | +| Explain code with explicit retrieval pipeline | Indirect | Yes | Orion advantage | +| Surface NL-friendly telemetry from search/rerank/LLM | Limited at bridge level | Yes | Orion advantage | + +--- + +## Retrieval and Querying: One-to-One Comparison + +| Dimension | Codebase Memory MCP | Project Orion | Better implementation | +|---|---|---|---| +| **Primary query primitive** | Graph and tool calls | Hybrid retrieval + LLM synthesis | Depends on task | +| **Best for "find exact structural impact"** | Excellent | Weaker | **CBM** | +| **Best for "answer my question in natural language"** | Requires tool orchestration | Native design | **Orion** | +| **Best for "where should I make the change?"** | Strong because of graph tracing and change impact | Good when retrieval finds the right chunks | **CBM** | +| **Best for "give me context quickly"** | Good if indexed repo is healthy and query tools are used correctly | Very good due to rerank/expand flow | Slight **Orion** advantage | + +### 
Query strategy comparison + +| Query layer | Codebase Memory MCP | Project Orion | +|---|---|---| +| Full-text search | Native `search_graph` / `search_code` with structural ranking | BM25 over chunk tokens | +| Symbol search | Graph-native identifiers and qualified names | Symbol extraction + metadata heuristics | +| Semantic search | Engine-level semantic embeddings and semantic edges | Embedding similarity plus HyDE | +| Multi-hop analysis | Native graph traversal | BFS expansion over stored import/call graph | +| LLM answer generation | External/client-side orchestration pattern | First-class in the engine | + +### What CBM does better on analysis quality + +- It operates on a stronger representation of the codebase. +- It can answer structural questions without forcing everything through an LLM. +- It has native tools for graph schema, architecture, path tracing, and change detection. + +### What Orion does better on analysis UX + +- It makes the retrieval pipeline explicit and inspectable. +- It combines vector search, BM25, HyDE, symbol search, reranking, and context expansion in a clean path. +- It is easier to understand why an answer was produced. + +--- + +## MCP and API Serving Comparison + +| Dimension | Codebase Memory MCP | Project Orion | Better implementation | +|---|---|---|---| +| **MCP server type** | HTTP bridge to stdio subprocess | Native FastMCP HTTP server | **Orion** | +| **Transport shape** | Bridge layer converts HTTP JSON-RPC into subprocess calls | Streamable HTTP MCP directly | **Orion** | +| **Concurrency model** | Bridge serializes through a single subprocess client | Native server process, simpler runtime path | **Orion** | +| **Auth model** | Bearer token at bridge layer | Bearer token middleware + transport security | Slight **Orion** advantage | +| **Operational complexity** | Higher | Lower | **Orion** | + +### Important implementation truth + +CBM's main serving weakness is **not** the engine. 
It is the wrapper design: + +- `ghl/internal/mcp/client.go` serializes all requests behind one mutex. +- `ghl/internal/bridge/bridge.go` is still a bridge pattern, not a fully direct engine-native HTTP service. + +By contrast, Orion's MCP surface is conceptually cleaner: + +- `FastMCP` +- `streamable_http_path="/"` +- explicit transport security settings + +So on MCP hosting quality alone, Orion is ahead. + +--- + +## Storage, Durability, and Reliability Comparison + +| Dimension | Codebase Memory MCP | Project Orion | Better implementation | +|---|---|---|---| +| **Storage unit** | One DB per indexed project | Multiple local artifacts per repo | **CBM** | +| **Integrity checks** | Yes | Minimal | **CBM** | +| **Crash safety** | Stronger | Weaker | **CBM** | +| **Read-only query open** | Yes | No equivalent discipline | **CBM** | +| **Re-index safety** | Better in engine design | Rebuild-oriented | **CBM** | + +### Reliability observations + +| Concern | Codebase Memory MCP | Project Orion | +|---|---|---| +| Corrupt store detection | Explicitly checks integrity before use | No equivalent strong guard observed | +| Project existence validation | Explicitly validates project exists in DB | Uses metadata + collection lookup | +| Atomic persistence story | Stronger | Weaker | +| Live deployment reliability | Currently reduced by wrapper/deployment issues | Simpler single-node app, but not platform-grade durable | + +### Important non-biased caveat + +CBM's **implementation** is stronger than its **current deployment behavior**. + +In practice today: + +- the CBM engine is strong +- the current fleet wrapper and deployment choices are the main reliability bottleneck + +That distinction matters. The weakness is mostly in orchestration, cache-pathing, and wrapper behavior, not in the engine design itself. 
+ +--- + +## Scaling and Multi-Pod Readiness + +| Dimension | Codebase Memory MCP | Project Orion | Better implementation | +|---|---|---|---| +| **Current replica strategy** | Single replica, `Recreate`, `ReadWriteOnce` PVC | Single replica, `Recreate`, `emptyDir` | Neither | +| **Multi-writer safety today** | No | No | Neither | +| **Reader/writer split potential** | High | Moderate | **CBM** | +| **Current shared-state design** | Better engine foundation, but wrapper is not horizontally safe | Explicitly local-only | **CBM**, but still not ready | + +### Direct comparison + +| Scaling question | Codebase Memory MCP | Project Orion | +|---|---|---| +| Can it safely run multi-pod as deployed now? | No | No | +| Can it evolve into 1 writer + N readers? | Yes, with the right topology | Harder, because storage and state model need larger changes | +| Is the current deployment intentionally single-writer? | Yes | Yes | + +### Bottom line on scale + +- CBM has the better **path to scale** +- Orion has the simpler **single-node path** +- neither is a genuine multi-pod, shared-state, horizontally safe service today + +--- + +## Test and Validation Surface + +| Dimension | Codebase Memory MCP | Project Orion | Better implementation | +|---|---|---|---| +| **Breadth of tests** | Broad C + Go test coverage across engine, store, MCP, incremental indexing, parallelism | Minimal API/discovery tests | **CBM** | +| **Depth of engine validation** | High | Low | **CBM** | +| **MCP/server validation** | Present | Present but smaller | **CBM** overall | + +### Practical meaning + +This is one of the clearest objective gaps in the codebases. + +- CBM looks like a system that has been tested as an engine. +- Orion looks like a system that has been proven enough to demo and iterate, but not hardened to the same degree. 
+ +--- + +## What Is Working Well in Codebase Memory MCP + +| Area | What is working well | Why it matters | +|---|---|---| +| Indexing engine | Parallel, graph-native, structurally rich | Better throughput and better analysis primitives | +| Change impact tooling | Native tracing and change-detection tools | Better for real engineering workflows | +| Persistence model | SQLite per project with integrity/dump discipline | Better reliability and easier query correctness guarantees | +| Semantic layer | Built into the engine | More useful structural-semantic analysis | +| Test coverage | Broad and deep | Higher confidence in correctness | + +--- + +## What Is Working Well in Project Orion + +| Area | What is working well | Why it matters | +|---|---|---| +| MCP serving | Native FastMCP streamable HTTP | Cleaner client experience | +| Local repo UX | Easy discovery and path-based indexing | Faster developer adoption | +| Retrieval flow | Hybrid search + rerank + context expansion | Better natural-language answer pipeline | +| Simplicity | Fewer architectural layers | Easier to reason about and debug | +| Developer-facing telemetry | Exposes retrieval and LLM stages clearly | Better explainability for analysis results | + +--- + +## Real Gaps: One-to-One + +| Gap | CBM status | Orion status | Who is ahead | +|---|---|---|---| +| Graph-native code intelligence | Strong | Partial | **CBM** | +| Hosted MCP quality | Good enough after bridge fixes, but still bridge-based | Cleaner native implementation | **Orion** | +| Incremental indexing | Present | Missing | **CBM** | +| Natural-language answer pipeline | External/client-oriented | First-class | **Orion** | +| Large-scale index economics | Better foundation | Poor today | **CBM** | +| Local developer usability | Weaker | Stronger | **Orion** | +| Durability discipline | Stronger | Weaker | **CBM** | +| Test maturity | Stronger | Weaker | **CBM** | + +--- + +## Final Recommendation + +### If the team must choose a 
technical foundation + +Choose **Codebase Memory MCP** as the foundation for long-term code intelligence. + +Reason: + +- better engine +- better graph model +- better impact-analysis tools +- better storage discipline +- better test surface +- better path to serious scale + +### If the team must choose a short-term developer experience winner + +Choose **Project Orion's serving model and UX patterns**. + +Reason: + +- simpler HTTP MCP surface +- easier local repo onboarding +- stronger natural-language retrieval pipeline +- easier to operate as a straightforward service + +### Best combined direction + +The strongest combined architecture is: + +1. **Keep CBM as the indexer and graph engine** +2. **Borrow Orion's cleaner server/retrieval UX ideas** +3. **Do not replace CBM's engine with Orion's current indexer** +4. **Do not treat Orion as multi-pod or large-scale ready without major rework** + +--- + +## Bottom Line in One Sentence + +**Codebase Memory MCP is the stronger technical engine; Project Orion is the cleaner developer-facing service; the best platform direction is to keep CBM's core and adopt Orion's best UX and transport ideas.** diff --git a/ghl/cmd/genlocalmanifest/main.go b/ghl/cmd/genlocalmanifest/main.go new file mode 100644 index 00000000..2152fe4f --- /dev/null +++ b/ghl/cmd/genlocalmanifest/main.go @@ -0,0 +1,137 @@ +package main + +import ( + "flag" + "fmt" + "os" + "os/exec" + "path/filepath" + "strings" + + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/manifest" + "gopkg.in/yaml.v3" +) + +func main() { + repoRoot := mustFindRepoRoot() + defaultWorkspace := filepath.Dir(repoRoot) + + workspaceRoot := flag.String("workspace-root", defaultWorkspace, "Workspace root containing local Git repos") + inputPath := flag.String("input", filepath.Join(repoRoot, "REPOS.yaml"), "Source manifest path") + outputPath := flag.String("output", filepath.Join(repoRoot, "REPOS.local.yaml"), "Generated local manifest path") + flag.Parse() + + m, err := 
manifest.Load(*inputPath) + if err != nil { + exitf("load manifest: %v", err) + } + + localRemotes, localDirs, err := scanWorkspace(*workspaceRoot) + if err != nil { + exitf("scan workspace: %v", err) + } + + filtered := manifest.Manifest{Repos: make([]manifest.Repo, 0, len(m.Repos))} + for _, repo := range m.Repos { + if localRemotes[canonicalGitHubURL(repo.GitHubURL)] || localDirs[repo.Name] { + filtered.Repos = append(filtered.Repos, repo) + } + } + + if err := writeManifest(*outputPath, *workspaceRoot, *inputPath, filtered); err != nil { + exitf("write manifest: %v", err) + } + + fmt.Printf("generated %s with %d repos (from %d total)\n", *outputPath, len(filtered.Repos), len(m.Repos)) +} + +func mustFindRepoRoot() string { + wd, err := os.Getwd() + if err != nil { + exitf("getwd: %v", err) + } + current := wd + for { + if _, err := os.Stat(filepath.Join(current, "REPOS.yaml")); err == nil { + return current + } + parent := filepath.Dir(current) + if parent == current { + exitf("could not locate repo root from %s", wd) + } + current = parent + } +} + +func scanWorkspace(workspaceRoot string) (map[string]bool, map[string]bool, error) { + entries, err := os.ReadDir(workspaceRoot) + if err != nil { + return nil, nil, err + } + + remotes := make(map[string]bool, len(entries)) + dirs := make(map[string]bool, len(entries)) + for _, entry := range entries { + if !entry.IsDir() { + continue + } + repoDir := filepath.Join(workspaceRoot, entry.Name()) + if _, err := os.Stat(filepath.Join(repoDir, ".git")); err != nil { + continue + } + dirs[entry.Name()] = true + remote, err := gitRemote(repoDir) + if err != nil { + continue + } + remotes[canonicalGitHubURL(remote)] = true + } + return remotes, dirs, nil +} + +func gitRemote(repoDir string) (string, error) { + cmd := exec.Command("git", "-C", repoDir, "remote", "get-url", "origin") + out, err := cmd.Output() + if err != nil { + return "", err + } + return strings.TrimSpace(string(out)), nil +} + +func 
canonicalGitHubURL(raw string) string { + url := strings.TrimSpace(raw) + switch { + case strings.HasPrefix(url, "git@github.com:"): + url = "https://github.com/" + strings.TrimPrefix(url, "git@github.com:") + case strings.HasPrefix(url, "ssh://git@github.com/"): + url = "https://github.com/" + strings.TrimPrefix(url, "ssh://git@github.com/") + } + url = strings.TrimSuffix(url, ".git") + url = strings.TrimRight(url, "/") + return strings.ToLower(url) +} + +func writeManifest(outputPath, workspaceRoot, inputPath string, m manifest.Manifest) error { + data, err := yaml.Marshal(m) + if err != nil { + return err + } + + header := []string{ + "# REPOS.local.yaml — generated local fleet manifest", + fmt.Sprintf("# workspace_root: %s", workspaceRoot), + fmt.Sprintf("# source_manifest: %s", inputPath), + "# Regenerate from ./ghl with: go run ./cmd/genlocalmanifest", + "", + } + + if err := os.MkdirAll(filepath.Dir(outputPath), 0750); err != nil { + return err + } + return os.WriteFile(outputPath, []byte(strings.Join(header, "\n")+string(data)), 0644) +} + +func exitf(format string, args ...interface{}) { + fmt.Fprintf(os.Stderr, format+"\n", args...) + os.Exit(1) +} diff --git a/ghl/cmd/server/main.go b/ghl/cmd/server/main.go new file mode 100644 index 00000000..31325833 --- /dev/null +++ b/ghl/cmd/server/main.go @@ -0,0 +1,1658 @@ +// ghl-fleet — GHL additions to codebase-memory-mcp. 
+// +// Runs three services in one process: +// - HTTP bridge: exposes the codebase-memory-mcp binary as an HTTP MCP endpoint +// - Fleet indexer: clones + indexes all 200 GHL repos on a schedule +// - Webhook handler: triggers re-index on GitHub push events +package main + +import ( + "context" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "log/slog" + "net/http" + "os" + "os/exec" + "os/signal" + "path/filepath" + "runtime" + "strings" + "sync" + "sync/atomic" + "syscall" + "time" + + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" + "github.com/robfig/cron/v3" + + ghlauth "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/auth" + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/bridge" + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/cachepersist" + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/discovery" + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/indexer" + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/manifest" + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/mcp" + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/searchtools" + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/webhook" +) + +var supportedProtocolVersions = []string{ + "2025-11-25", + "2025-06-18", + "2025-03-26", + "2024-11-05", +} + +func main() { + logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelInfo})) + slog.SetDefault(logger) + + cfg := loadConfig() + ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) + defer stop() + + if err := os.MkdirAll(cfg.CloneCacheDir, 0o750); err != nil { + slog.Error("failed to create clone cache dir", "path", cfg.CloneCacheDir, "err", err) + os.Exit(1) + } + if err := os.MkdirAll(cfg.CBMCacheDir, 0o750); err != nil { + slog.Error("failed to create cbm cache dir", "path", cfg.CBMCacheDir, "err", err) + os.Exit(1) + } + + var artifactSync *cachepersist.Syncer + if 
cfg.ArtifactsEnabled { + var err error + switch strings.ToLower(strings.TrimSpace(cfg.ArtifactsBackend)) { + case "gcs": + artifactSync, err = cachepersist.NewGCS(ctx, cfg.CBMCacheDir, cfg.ArtifactsBucket, cfg.ArtifactsPrefix) + default: + artifactSync, err = cachepersist.New(cfg.CBMCacheDir, cfg.ArtifactDir) + } + if err != nil { + slog.Error("failed to initialize artifact sync", "runtime_dir", cfg.CBMCacheDir, "artifact_dir", cfg.ArtifactDir, "err", err) + os.Exit(1) + } + defer func() { + if err := artifactSync.Close(); err != nil { + slog.Warn("failed to close artifact sync", "err", err) + } + }() + if cfg.ArtifactsSkipHydrate { + slog.Info("skipping persisted index hydrate", "artifact_dir", cfg.ArtifactDir, "cache_dir", cfg.CBMCacheDir) + } else { + hydrateStart := time.Now() + hydrated, err := artifactSync.Hydrate() + if err != nil { + slog.Warn("failed to hydrate persisted indexes (continuing with empty cache)", "err", err, "duration", time.Since(hydrateStart)) + } else { + slog.Info("hydration complete", "files", hydrated, "duration", time.Since(hydrateStart), "artifact_dir", cfg.ArtifactDir, "cache_dir", cfg.CBMCacheDir) + } + } + } + + // ── Load fleet manifest (YAML first for fast startup) ──── + + m, err := manifest.Load(cfg.ReposManifest) + if err != nil { + slog.Error("failed to load repos manifest", "path", cfg.ReposManifest, "err", err) + os.Exit(1) + } + slog.Info("fleet manifest loaded", "repos", len(m.Repos)) + + cloner := &gitCloner{ + logger: logger, + githubToken: cfg.GitHubToken, + } + + // activityChecker filters stale repos during fleet runs. 
+ var actChecker indexer.ActivityChecker + if cfg.GitHubToken != "" { + actChecker = &githubActivityChecker{ + token: cfg.GitHubToken, + baseURL: cfg.GitHubAPIBaseURL, + org: firstOrg(cfg.GitHubAllowedOrgs), + days: 7, + } + } + + newFleetIndexer := func(client indexer.Client, discoverySvc *discovery.Discoverer) *indexer.Indexer { + return indexer.New(indexer.Config{ + Client: client, + Cloner: cloner, + CacheDir: cfg.CloneCacheDir, + Concurrency: cfg.Concurrency, + ProjectNameFunc: func(repoSlug string) string { + return projectNameFromPath(filepath.Join(cfg.CloneCacheDir, repoSlug)) + }, + ActivityChecker: actChecker, + OnRepoStart: func(slug string) { slog.Info("indexing repo", "repo", slug) }, + OnRepoDone: func(slug string, err error) { + if err != nil { + slog.Error("repo indexing failed", "repo", slug, "err", err) + return + } + if artifactSync != nil { + clonePath := filepath.Join(cfg.CloneCacheDir, slug) + projectName := projectNameFromPath(clonePath) + persisted, persistErr := artifactSync.PersistProject(projectName) + if persistErr != nil { + slog.Error("failed to persist project index", "repo", slug, "project", projectName, "err", persistErr) + } else { + slog.Info("persisted project index", "repo", slug, "project", projectName, "files", persisted) + } + } + if discoverySvc != nil { + discoverySvc.Invalidate() + } + slog.Info("repo indexed", "repo", slug) + }, + OnAllComplete: func(result indexer.IndexResult) { + slog.Info("fleet indexing complete", "total", result.Total, "ok", result.Succeeded, "failed", result.Failed, "skipped", result.Skipped) + }, + }) + } + + if cfg.RunMode == "index-all" { + indexPool, err := newMCPIndexClientPool(ctx, cfg.BinaryPath, cfg.IndexerClients, cfg.IndexerClientMaxUses) + if err != nil { + slog.Error("failed to start indexer client pool", "clients", cfg.IndexerClients, "err", err) + os.Exit(1) + } + defer indexPool.Close() + slog.Info("indexer client pool started", "clients", cfg.IndexerClients, "max_uses", 
cfg.IndexerClientMaxUses) + + idx := newFleetIndexer(indexPool, nil) + slog.Info("running one-shot fleet indexing job", "force", cfg.RunForce) + result := idx.IndexAll(context.Background(), m.Repos, cfg.RunForce) + slog.Info("one-shot fleet indexing complete", "total", result.Total, "ok", result.Succeeded, "failed", result.Failed, "skipped", result.Skipped) + if result.Failed > 0 { + os.Exit(1) + } + return + } + + // ── Start MCP binary clients ───────────────────────────── + + bridgePool, err := newMCPBridgeClientPool(ctx, cfg.BinaryPath, cfg.BridgeClients, cfg.BridgeAcquireTimeout) + if err != nil { + slog.Error("failed to start bridge client pool", "binary", cfg.BinaryPath, "clients", cfg.BridgeClients, "err", err) + os.Exit(1) + } + defer bridgePool.Close() + slog.Info( + "bridge client pool started", + "name", bridgePool.ServerInfo().Name, + "version", bridgePool.ServerInfo().Version, + "clients", cfg.BridgeClients, + "acquire_timeout_ms", cfg.BridgeAcquireTimeout.Milliseconds(), + ) + + indexPool, err := newMCPIndexClientPool(ctx, cfg.BinaryPath, cfg.IndexerClients, cfg.IndexerClientMaxUses) + if err != nil { + slog.Error("failed to start indexer client pool", "clients", cfg.IndexerClients, "err", err) + os.Exit(1) + } + defer indexPool.Close() + slog.Info("indexer client pool started", "clients", cfg.IndexerClients, "max_uses", cfg.IndexerClientMaxUses) + + discoveryPool, err := newMCPDiscoveryClientPool(ctx, cfg.BinaryPath, cfg.DiscoveryClients) + if err != nil { + slog.Error("failed to start discovery client pool", "clients", cfg.DiscoveryClients, "err", err) + os.Exit(1) + } + defer discoveryPool.Close() + slog.Info("discovery client pool started", "clients", cfg.DiscoveryClients) + + var requestAuthenticator bridge.Authenticator + { // Auth is always on — no env flag + requestAuthenticator = ghlauth.NewGitHubAuthenticator(ghlauth.GitHubConfig{ + BaseURL: cfg.GitHubAPIBaseURL, + AllowedOrgs: cfg.GitHubAllowedOrgs, + CacheTTL: cfg.GitHubAuthCacheTTL, + }) 
+ slog.Info("github bearer auth enabled", "allowed_orgs", cfg.GitHubAllowedOrgs) + } + + // ── Build indexer ──────────────────────────────────────── + + var discoverySvc *discovery.Discoverer + maxGraphCandidates := 3 + if cfg.DiscoveryMaxCandidates > 0 && cfg.DiscoveryMaxCandidates < maxGraphCandidates { + maxGraphCandidates = cfg.DiscoveryMaxCandidates + } + discoverySvc = discovery.NewService(discoveryPool, *m, discovery.Options{ + MaxBM25Candidates: cfg.DiscoveryMaxCandidates, + MaxGraphCandidates: maxGraphCandidates, + RequestTimeout: cfg.DiscoveryTimeout, + }) + idx := newFleetIndexer(indexPool, discoverySvc) + + var fleetIndexing atomic.Bool + startFleetIndex := func(reason string, force bool) bool { + if !fleetIndexing.CompareAndSwap(false, true) { + slog.Warn("fleet index already running", "reason", reason, "force", force) + return false + } + go func() { + defer fleetIndexing.Store(false) + slog.Info("fleet index starting", "reason", reason, "force", force) + result := idx.IndexAll(context.Background(), m.Repos, force) + slog.Info("fleet index complete", "reason", reason, "force", force, "total", result.Total, "ok", result.Succeeded, "failed", result.Failed, "skipped", result.Skipped) + }() + return true + } + + // ── Fleet scheduler ────────────────────────────────────── + + c := cron.New() + { // Scheduled indexing — always on + c.AddFunc(cfg.IncrementalCron, func() { + startFleetIndex("cron-incremental", false) + }) + c.AddFunc(cfg.FullCron, func() { + startFleetIndex("cron-full", true) + }) + c.Start() + defer c.Stop() + slog.Info("scheduled indexing enabled", "incremental_cron", cfg.IncrementalCron, "full_cron", cfg.FullCron) + } + + // ── HTTP router ────────────────────────────────────────── + + r := chi.NewRouter() + r.Use(middleware.RequestID) + r.Use(middleware.RealIP) + r.Use(middleware.Recoverer) + r.Use(middleware.Timeout(5 * time.Minute)) + + // Search result cache — per-instance, 60 s TTL, 1000 entry max. 
+ searchCache := bridge.NewSearchCache(1000, 60*time.Second) + slog.Info("search result cache enabled", "max_size", 1000, "ttl_s", 60) + + // Bridge: forward MCP calls to the binary + bridgeHandler := bridge.NewHandler( + &mcpBridgeBackend{client: bridgePool, discovery: discoverySvc, cache: searchCache, cacheDir: cfg.CBMCacheDir}, + bridge.Config{BearerToken: cfg.BearerToken, Authenticator: requestAuthenticator}, + ) + r.Mount("/mcp", bridgeHandler) + r.Get("/health", bridgeHandler.ServeHTTP) + + requireAuth := makeAuthMiddleware(cfg.BearerToken, requestAuthenticator) + + // Webhook: trigger re-index on GitHub push + wh := webhook.NewHandler(webhook.Config{ + Secret: []byte(cfg.WebhookSecret), + OnPush: func(repoSlug string) { + repo, ok := m.FindByName(repoSlug) + if !ok { + slog.Warn("webhook: repo not in manifest", "repo", repoSlug) + return + } + slog.Info("webhook: re-indexing repo", "repo", repoSlug) + if err := idx.IndexRepo(context.Background(), repo, false); err != nil { + slog.Error("webhook: index failed", "repo", repoSlug, "err", err) + return + } + // Persist .db to GCS (same as fleet OnRepoDone) + if artifactSync != nil { + clonePath := filepath.Join(cfg.CloneCacheDir, repoSlug) + projectName := projectNameFromPath(clonePath) + if _, persistErr := artifactSync.PersistProject(projectName); persistErr != nil { + slog.Error("webhook: persist failed", "repo", repoSlug, "err", persistErr) + } else { + slog.Info("webhook: persisted", "repo", repoSlug) + } + } + if discoverySvc != nil { + discoverySvc.Invalidate() + } + }, + }) + r.Post("/webhooks/github", wh.ServeHTTP) + + // Manual trigger: index a single repo by slug. + // Runs the same persist as the fleet OnRepoDone callback. 
+ r.Post("/index/{repoSlug}", requireAuth(func(w http.ResponseWriter, req *http.Request) { + slug := chi.URLParam(req, "repoSlug") + repo, ok := m.FindByName(slug) + if !ok { + http.Error(w, "repo not found in manifest", http.StatusNotFound) + return + } + go func() { + slog.Info("manual index: starting", "repo", slug) + indexCtx, cancel := context.WithTimeout(context.Background(), 30*time.Minute) + defer cancel() + if err := idx.IndexRepo(indexCtx, repo, true); err != nil { + slog.Error("manual index failed", "repo", slug, "err", err) + return + } + // Persist .db to GCS (same as fleet OnRepoDone) + if artifactSync != nil { + clonePath := filepath.Join(cfg.CloneCacheDir, slug) + projectName := projectNameFromPath(clonePath) + persisted, persistErr := artifactSync.PersistProject(projectName) + if persistErr != nil { + slog.Error("manual index: persist failed", "repo", slug, "project", projectName, "err", persistErr) + } else { + slog.Info("manual index: persisted", "repo", slug, "project", projectName, "files", persisted) + } + } + if discoverySvc != nil { + discoverySvc.Invalidate() + } + slog.Info("manual index complete", "repo", slug) + }() + w.WriteHeader(http.StatusAccepted) + fmt.Fprintf(w, `{"accepted":true,"repo":%q}`, slug) + })) + + r.Post("/index-all", requireAuth(func(w http.ResponseWriter, req *http.Request) { + force := req.URL.Query().Get("force") == "1" || strings.EqualFold(req.URL.Query().Get("force"), "true") + if !startFleetIndex("manual", force) { + http.Error(w, "fleet index already running", http.StatusConflict) + return + } + w.WriteHeader(http.StatusAccepted) + fmt.Fprintf(w, `{"accepted":true,"force":%t}`, force) + })) + + // Fleet status endpoint + r.Get("/status", requireAuth(func(w http.ResponseWriter, req *http.Request) { + artifactCount := 0 + artifactLocation := cfg.ArtifactDir + if artifactSync != nil { + count, err := artifactSync.CountArtifacts() + if err != nil { + slog.Warn("failed to count persisted indexes", "err", err) + } 
else { + artifactCount = count + } + artifactLocation = artifactSync.ArtifactDir + } + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "repos": len(m.Repos), + "version": bridgePool.ServerInfo().Version, + "binary": cfg.BinaryPath, + "clone_cache": cfg.CloneCacheDir, + "cbm_cache": cfg.CBMCacheDir, + "artifact_dir": artifactLocation, + "artifact_files": artifactCount, + "artifacts_enabled": cfg.ArtifactsEnabled, + "manifest": cfg.ReposManifest, + "concurrency": cfg.Concurrency, + "bridge_clients": cfg.BridgeClients, + "bridge_acquire_timeout": cfg.BridgeAcquireTimeout.Milliseconds(), + "indexer_clients": cfg.IndexerClients, + "discovery_clients": cfg.DiscoveryClients, + "discovery_max_candidates": cfg.DiscoveryMaxCandidates, + "discovery_timeout_ms": cfg.DiscoveryTimeout.Milliseconds(), + "startup_index_enabled": false, + "scheduled_index_enabled": true, + "fleet_index_running": fleetIndexing.Load(), + "github_auth_enabled": true, + }) + })) + + srv := &http.Server{ + Addr: ":" + cfg.Port, + Handler: r, + ReadTimeout: 30 * time.Second, + WriteTimeout: 10 * time.Minute, + IdleTimeout: 120 * time.Second, + } + + // ── Startup indexing pass ──────────────────────────────── + + // Startup indexing disabled — hydration from GCS is sufficient. + // Scheduled cron handles ongoing indexing. 
+ + // ── Serve ──────────────────────────────────────────────── + + go func() { + slog.Info("server listening", "addr", srv.Addr) + if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed { + slog.Error("server error", "err", err) + stop() + } + }() + + <-ctx.Done() + slog.Info("shutting down...") + + shutdownCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + if err := srv.Shutdown(shutdownCtx); err != nil { + slog.Error("server shutdown error", "err", err) + } +} + +func makeAuthMiddleware(staticToken string, auth bridge.Authenticator) func(http.HandlerFunc) http.HandlerFunc { + return func(next http.HandlerFunc) http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + authHeader := req.Header.Get("Authorization") + if auth != nil { + if !strings.HasPrefix(authHeader, "Bearer ") { + http.Error(w, "unauthorized", http.StatusUnauthorized) + return + } + if err := auth.Authenticate(req.Context(), strings.TrimPrefix(authHeader, "Bearer ")); err != nil { + http.Error(w, "unauthorized", http.StatusUnauthorized) + return + } + } else if staticToken != "" { + if !strings.HasPrefix(authHeader, "Bearer ") || strings.TrimPrefix(authHeader, "Bearer ") != staticToken { + http.Error(w, "unauthorized", http.StatusUnauthorized) + return + } + } + next(w, req) + } + } +} + +// ── Config ───────────────────────────────────────────────────── + +type config struct { + Port string + BinaryPath string + CloneCacheDir string + CBMCacheDir string + ArtifactDir string + ArtifactsEnabled bool + ArtifactsBackend string + ArtifactsBucket string + ArtifactsPrefix string + ArtifactsSkipHydrate bool + ReposManifest string + BearerToken string + GitHubToken string + GitHubAuthEnabled bool + GitHubAllowedOrgs []string + GitHubAPIBaseURL string + GitHubAuthCacheTTL time.Duration + WebhookSecret string + Concurrency int + BridgeClients int + BridgeAcquireTimeout time.Duration + IndexerClients int + 
IndexerClientMaxUses int + DiscoveryClients int + DiscoveryMaxCandidates int + DiscoveryTimeout time.Duration + IncrementalCron string + FullCron string + StartupIndexEnabled bool + ScheduledIndexingEnabled bool + RunMode string + RunForce bool +} + +func loadConfig() config { + getEnv := func(key, def string) string { + if v := os.Getenv(key); v != "" { + return v + } + return def + } + getBool := func(key string, def bool) bool { + v := strings.TrimSpace(getEnv(key, "")) + if v == "" { + return def + } + switch strings.ToLower(v) { + case "1", "true", "yes", "on": + return true + case "0", "false", "no", "off": + return false + default: + return def + } + } + getStringList := func(key string) []string { + raw := strings.TrimSpace(getEnv(key, "")) + if raw == "" { + return nil + } + parts := strings.Split(raw, ",") + out := make([]string, 0, len(parts)) + for _, part := range parts { + part = strings.TrimSpace(part) + if part != "" { + out = append(out, part) + } + } + return out + } + getConcurrency := func() int { + v := getEnv("FLEET_CONCURRENCY", "5") + n := 5 + fmt.Sscanf(v, "%d", &n) + return n + } + getBridgeClients := func() int { + v := getEnv("BRIDGE_CLIENTS", "") + if v == "" { + n := runtime.GOMAXPROCS(0) + if n < 2 { + return 2 + } + if n > 4 { + return 4 + } + return n + } + n := 1 + fmt.Sscanf(v, "%d", &n) + if n <= 0 { + return 1 + } + return n + } + getBridgeAcquireTimeout := func() time.Duration { + v := getEnv("BRIDGE_ACQUIRE_TIMEOUT_MS", "1500") + n := 1500 + fmt.Sscanf(v, "%d", &n) + if n <= 0 { + return 1500 * time.Millisecond + } + return time.Duration(n) * time.Millisecond + } + getIndexerClients := func(concurrency int) int { + v := getEnv("INDEXER_CLIENTS", "") + if v == "" { + return concurrency + } + n := concurrency + fmt.Sscanf(v, "%d", &n) + if n <= 0 { + return concurrency + } + return n + } + getIndexerClientMaxUses := func() int { + v := getEnv("INDEXER_CLIENT_MAX_USES", "1") + n := 1 + fmt.Sscanf(v, "%d", &n) + if n <= 0 { + 
return 1 + } + return n + } + getDiscoveryClients := func(concurrency int) int { + v := getEnv("DISCOVERY_CLIENTS", "") + if v == "" { + if concurrency < 2 { + return 2 + } + return concurrency + } + n := concurrency + fmt.Sscanf(v, "%d", &n) + if n <= 0 { + if concurrency < 2 { + return 2 + } + return concurrency + } + return n + } + getDiscoveryMaxCandidates := func() int { + v := getEnv("DISCOVERY_MAX_CANDIDATES", "5") + n := 5 + fmt.Sscanf(v, "%d", &n) + if n <= 0 { + return 5 + } + return n + } + getDiscoveryTimeout := func() time.Duration { + v := getEnv("DISCOVERY_TIMEOUT_MS", "5000") + n := 5000 + fmt.Sscanf(v, "%d", &n) + if n <= 0 { + return 5 * time.Second + } + return time.Duration(n) * time.Millisecond + } + getGitHubAuthCacheTTL := func() time.Duration { + v := getEnv("GITHUB_AUTH_CACHE_TTL_MS", "300000") + n := 300000 + fmt.Sscanf(v, "%d", &n) + if n <= 0 { + return 5 * time.Minute + } + return time.Duration(n) * time.Millisecond + } + concurrency := getConcurrency() + return config{ + Port: getEnv("PORT", "8080"), + BinaryPath: getEnv("CBM_BINARY", defaultBinaryPath()), + CloneCacheDir: getEnv("FLEET_CACHE_DIR", "/data/fleet-cache/repos"), + CBMCacheDir: getEnv("CBM_CACHE_DIR", "/tmp/codebase-memory-mcp"), + ArtifactDir: getEnv("CBM_ARTIFACT_DIR", "/data/fleet-cache/indexes"), + ArtifactsEnabled: getBool("ARTIFACTS_ENABLED", true), + ArtifactsBackend: getEnv("ARTIFACTS_BACKEND", "filesystem"), + ArtifactsBucket: getEnv("ARTIFACTS_BUCKET", ""), + ArtifactsPrefix: getEnv("ARTIFACTS_PREFIX", ""), + ArtifactsSkipHydrate: getBool("ARTIFACTS_SKIP_HYDRATE", false), + ReposManifest: getEnv("REPOS_MANIFEST", defaultManifestPath()), + BearerToken: getEnv("BEARER_TOKEN", ""), + GitHubToken: getEnv("GITHUB_TOKEN", ""), + GitHubAuthEnabled: getBool("GITHUB_AUTH_ENABLED", false), + GitHubAllowedOrgs: getStringList("GITHUB_ALLOWED_ORGS"), + GitHubAPIBaseURL: getEnv("GITHUB_API_BASE_URL", "https://api.github.com"), + GitHubAuthCacheTTL: getGitHubAuthCacheTTL(), + 
WebhookSecret: getEnv("GITHUB_WEBHOOK_SECRET", ""), + Concurrency: concurrency, + BridgeClients: getBridgeClients(), + BridgeAcquireTimeout: getBridgeAcquireTimeout(), + IndexerClients: getIndexerClients(concurrency), + IndexerClientMaxUses: getIndexerClientMaxUses(), + DiscoveryClients: getDiscoveryClients(concurrency), + DiscoveryMaxCandidates: getDiscoveryMaxCandidates(), + DiscoveryTimeout: getDiscoveryTimeout(), + IncrementalCron: getEnv("CRON_INCREMENTAL", "0 */6 * * *"), + FullCron: getEnv("CRON_FULL", "0 2 * * 0"), + StartupIndexEnabled: getBool("STARTUP_INDEX_ENABLED", false), + ScheduledIndexingEnabled: getBool("SCHEDULED_INDEXING_ENABLED", false), + RunMode: strings.TrimSpace(getEnv("RUN_MODE", "serve")), + RunForce: getBool("RUN_FORCE", false), + } +} + + +func defaultManifestPath() string { + candidates := []string{ + "/app/REPOS.local.yaml", + "/app/REPOS.yaml", + } + for _, candidate := range candidates { + if _, err := os.Stat(candidate); err == nil { + return candidate + } + } + return "/app/REPOS.yaml" +} + +// projectNamePrefix is hardcoded — always use this prefix for consistent naming. +var projectNamePrefix = "data-fleet-cache-repos" + +// projectNameFromPath returns the canonical project name for a clone path. +// When PROJECT_NAME_PREFIX is set, it uses prefix + slug (e.g. +// "data-fleet-cache-repos-membership-backend"). Otherwise it falls back to +// replacing path separators with dashes. +func projectNameFromPath(absPath string) string { + path := filepath.ToSlash(strings.TrimSpace(absPath)) + if path == "" { + return "root" + } + + // Preferred: prefix + last path segment (the repo slug). + if projectNamePrefix != "" { + slug := filepath.Base(path) + if slug == "" || slug == "." || slug == "/" { + return "root" + } + return projectNamePrefix + "-" + slug + } + + // Fallback (local dev, no prefix set): replace separators with dashes. 
+ var b strings.Builder + b.Grow(len(path)) + prevDash := false + for _, r := range path { + if r == '/' || r == ':' { + if prevDash { + continue + } + b.WriteByte('-') + prevDash = true + continue + } + b.WriteRune(r) + prevDash = r == '-' + } + + project := strings.Trim(b.String(), "-") + if project == "" { + return "root" + } + return project +} + +// ── Activity checking ────────────────────────────────────────── + +// githubActivityChecker implements indexer.ActivityChecker using the GitHub API +// to skip repos with no commits in the last N days. +type githubActivityChecker struct { + token string + baseURL string + org string + days int +} + +func (c *githubActivityChecker) IsActive(ctx context.Context, repoName string) bool { + if c.token == "" || c.org == "" { + return true // can't check, assume active + } + url := fmt.Sprintf("%s/repos/%s/%s/commits?per_page=1", c.baseURL, c.org, repoName) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + slog.Warn("activity check: request build failed", "repo", repoName, "err", err) + return true + } + req.Header.Set("Authorization", "Bearer "+c.token) + req.Header.Set("Accept", "application/vnd.github+json") + + client := &http.Client{Timeout: 10 * time.Second} + resp, err := client.Do(req) + if err != nil { + slog.Warn("activity check: request failed", "repo", repoName, "err", err) + return true // network error — assume active to avoid skipping + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + // 404 = repo deleted/renamed, 403 = rate limited; assume active to be safe + return true + } + + var commits []struct { + Commit struct { + Committer struct { + Date time.Time `json:"date"` + } `json:"committer"` + } `json:"commit"` + } + if err := json.NewDecoder(resp.Body).Decode(&commits); err != nil || len(commits) == 0 { + return true + } + + cutoff := time.Now().AddDate(0, 0, -c.days) + active := commits[0].Commit.Committer.Date.After(cutoff) + if !active 
{ + slog.Info("activity check: stale repo, skipping", "repo", repoName, + "last_commit", commits[0].Commit.Committer.Date.Format(time.RFC3339), + "cutoff_days", c.days) + } + return active +} + +func firstOrg(orgs []string) string { + if len(orgs) > 0 { + return orgs[0] + } + return "" +} + +func defaultBinaryPath() string { + name := "codebase-memory-mcp" + if runtime.GOOS == "windows" { + name += ".exe" + } + exe, _ := os.Executable() + dir := filepath.Dir(exe) + candidate := filepath.Join(dir, name) + if _, err := os.Stat(candidate); err == nil { + return candidate + } + // Fallback: find in PATH + if path, err := exec.LookPath(name); err == nil { + return path + } + return name +} + +// ── Adapters ─────────────────────────────────────────────────── + +// gitCloner implements indexer.Cloner using git CLI. +type gitCloner struct { + logger *slog.Logger + githubToken string +} + +func (g *gitCloner) EnsureClone(ctx context.Context, githubURL, localPath string) error { + if _, err := os.Stat(filepath.Join(localPath, ".git")); err == nil { + // Already cloned — fetch latest + g.logger.Debug("updating clone", "path", localPath) + cmd := g.gitCommand(ctx, localPath, githubURL, "fetch", "--depth=1", "origin", "HEAD") + if out, err := cmd.CombinedOutput(); err != nil { + outStr := string(out) + if isGitHubHTTPSAuthError(outStr) { + g.logger.Warn("git fetch auth failed, using existing clone", "path", localPath) + if err := g.restoreWorkingTree(ctx, githubURL, localPath, "HEAD"); err != nil { + return err + } + return g.validateClone(localPath) + } + // Corrupt clone (e.g. 
"index file smaller than expected") — nuke and re-clone + if strings.Contains(outStr, "index file smaller") || + strings.Contains(outStr, "bad signature") || + strings.Contains(outStr, "index file corrupt") { + g.logger.Warn("corrupt git clone detected, removing for fresh clone", "path", localPath, "err", outStr) + os.RemoveAll(localPath) + // Fall through to fresh clone below + } else { + return fmt.Errorf("git fetch: %w\n%s", err, out) + } + } else { + if err := g.restoreWorkingTree(ctx, githubURL, localPath, "FETCH_HEAD"); err != nil { + return err + } + return g.validateClone(localPath) + } + } + // Fresh clone + if err := os.MkdirAll(localPath, 0750); err != nil { + return fmt.Errorf("mkdir %q: %w", localPath, err) + } + // Remove empty dir to allow clone into it + os.Remove(localPath) + g.logger.Info("cloning repo", "url", githubURL, "path", localPath) + // No timeout — large monorepos can take 20+ minutes to clone and index. + // The fleet indexer uses context.Background() which has no deadline. + cmd := g.gitCommand(ctx, "", githubURL, "clone", "--depth=1", githubURL, localPath) + if out, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("git clone %q: %w\n%s", githubURL, err, out) + } + return g.validateClone(localPath) +} + +func isGitHubHTTPSAuthError(output string) bool { + return strings.Contains(output, "could not read Username for 'https://github.com'") +} + +func (g *gitCloner) gitCommand(ctx context.Context, dir, githubURL string, args ...string) *exec.Cmd { + gitArgs := make([]string, 0, len(args)+4) + if g.githubToken != "" && strings.HasPrefix(githubURL, "https://github.com/") { + auth := base64.StdEncoding.EncodeToString([]byte("x-access-token:" + g.githubToken)) + gitArgs = append(gitArgs, + "-c", "credential.helper=", + "-c", "http.https://github.com/.extraheader=AUTHORIZATION: basic "+auth, + ) + } + gitArgs = append(gitArgs, args...) + cmd := exec.CommandContext(ctx, "git", gitArgs...) 
+ if dir != "" { + cmd.Dir = dir + } + return cmd +} + +func (g *gitCloner) restoreWorkingTree(ctx context.Context, githubURL, localPath, ref string) error { + // Remove stale index.lock left by crashed git processes — prevents permanent failure + // Remove stale lock files left by crashed git processes + for _, lockFile := range []string{"index.lock", "HEAD.lock", "config.lock"} { + lockPath := filepath.Join(localPath, ".git", lockFile) + if _, err := os.Stat(lockPath); err == nil { + os.Remove(lockPath) + g.logger.Info("removed stale git lock", "file", lockFile, "path", lockPath) + } + } + cmd := g.gitCommand(ctx, localPath, githubURL, "reset", "--hard", ref) + if out, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("git reset --hard %s: %w\n%s", ref, err, out) + } + cmd = g.gitCommand(ctx, localPath, githubURL, "clean", "-fd") + if out, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("git clean -fd: %w\n%s", err, out) + } + return nil +} + +func (g *gitCloner) validateClone(localPath string) error { + ok, err := hasWorkingTreeFiles(localPath) + if err != nil { + return err + } + if !ok { + return fmt.Errorf("clone at %q has no checked out files", localPath) + } + return nil +} + +func hasWorkingTreeFiles(root string) (bool, error) { + var found bool + stop := errors.New("found working tree file") + err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if path == root { + return nil + } + if info.IsDir() { + if info.Name() == ".git" { + return filepath.SkipDir + } + return nil + } + found = true + return stop + }) + if err != nil && !errors.Is(err, stop) { + return false, err + } + return found, nil +} + +type bridgePoolClient interface { + ServerInfo() mcp.ServerInfo + Call(ctx context.Context, method string, params interface{}) (json.RawMessage, error) + CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) + Close() +} + 
+var newBridgePoolClient = func(ctx context.Context, binPath string) (bridgePoolClient, error) { + return mcp.NewClient(ctx, binPath) +} + +type mcpBridgeClientPool struct { + binPath string + acquireTimeout time.Duration + mu sync.Mutex + clients chan bridgePoolClient + all []bridgePoolClient + info mcp.ServerInfo +} + +func newMCPBridgeClientPool(ctx context.Context, binPath string, size int, acquireTimeout time.Duration) (*mcpBridgeClientPool, error) { + if size <= 0 { + size = 1 + } + pool := &mcpBridgeClientPool{ + binPath: binPath, + acquireTimeout: acquireTimeout, + clients: make(chan bridgePoolClient, size), + all: make([]bridgePoolClient, 0, size), + } + for i := 0; i < size; i++ { + client, err := newBridgePoolClient(ctx, binPath) + if err != nil { + pool.Close() + return nil, fmt.Errorf("start bridge client %d/%d: %w", i+1, size, err) + } + if i == 0 { + pool.info = client.ServerInfo() + } + pool.all = append(pool.all, client) + pool.clients <- client + } + return pool, nil +} + +func (p *mcpBridgeClientPool) ServerInfo() mcp.ServerInfo { + return p.info +} + +func (p *mcpBridgeClientPool) Close() { + for _, client := range p.all { + client.Close() + } +} + +func (p *mcpBridgeClientPool) borrow(ctx context.Context) (bridgePoolClient, error) { + if p.acquireTimeout <= 0 { + select { + case client := <-p.clients: + return client, nil + case <-ctx.Done(): + return nil, ctx.Err() + } + } + + acquireCtx, cancel := context.WithTimeoutCause(ctx, p.acquireTimeout, bridge.ErrBackendBusy) + defer cancel() + + select { + case client := <-p.clients: + return client, nil + case <-acquireCtx.Done(): + if errors.Is(context.Cause(acquireCtx), bridge.ErrBackendBusy) { + return nil, bridge.ErrBackendBusy + } + return nil, ctx.Err() + } +} + +func (p *mcpBridgeClientPool) release(client bridgePoolClient) { + if client == nil { + return + } + p.clients <- client +} + +// isDeadClientError returns true if the error indicates the C binary subprocess is dead +// (broken pipe, 
closed stdout). These clients must be retired, not reused. +func isDeadClientError(err error) bool { + if err == nil { + return false + } + msg := err.Error() + return strings.Contains(msg, "broken pipe") || + strings.Contains(msg, "subprocess closed stdout") || + strings.Contains(msg, "mcp: read:") || + strings.Contains(msg, "mcp: send") +} + +func (p *mcpBridgeClientPool) Call(ctx context.Context, method string, params interface{}) (json.RawMessage, error) { + client, err := p.borrow(ctx) + if err != nil { + return nil, err + } + + type callResult struct { + result json.RawMessage + err error + } + + resultCh := make(chan callResult, 1) + go func() { + result, callErr := client.Call(ctx, method, params) + resultCh <- callResult{result: result, err: callErr} + }() + + select { + case out := <-resultCh: + if isDeadClientError(out.err) { + client.Close() + go p.replaceClientAsync(client) + } else { + p.release(client) + } + return out.result, out.err + case <-ctx.Done(): + client.Close() + go p.replaceClientAsync(client) + return nil, ctx.Err() + } +} + +func (p *mcpBridgeClientPool) CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) { + client, err := p.borrow(ctx) + if err != nil { + return nil, err + } + + type toolCallResult struct { + result *mcp.ToolResult + err error + } + + resultCh := make(chan toolCallResult, 1) + go func() { + result, callErr := client.CallTool(ctx, name, params) + resultCh <- toolCallResult{result: result, err: callErr} + }() + + select { + case out := <-resultCh: + if isDeadClientError(out.err) { + client.Close() + go p.replaceClientAsync(client) + } else { + p.release(client) + } + return out.result, out.err + case <-ctx.Done(): + client.Close() + go p.replaceClientAsync(client) + return nil, ctx.Err() + } +} + +func (p *mcpBridgeClientPool) replaceClientAsync(dead bridgePoolClient) { + replacementCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + 
+ replacement, err := newBridgePoolClient(replacementCtx, p.binPath) + if err != nil { + slog.Error("failed to replace timed out bridge client", "err", err) + return + } + + p.mu.Lock() + for i, client := range p.all { + if client == dead { + p.all[i] = replacement + break + } + } + p.mu.Unlock() + + p.release(replacement) +} + +type indexToolClient interface { + CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) + Close() +} + +var newIndexToolClient = func(ctx context.Context, binPath string) (indexToolClient, error) { + return mcp.NewClient(ctx, binPath) +} + +type mcpToolClientPool struct { + binPath string + maxUses int + mu sync.Mutex + clients chan indexToolClient + all []indexToolClient + uses map[indexToolClient]int +} + +func newMCPToolClientPool(ctx context.Context, binPath string, size int, maxUses int) (*mcpToolClientPool, error) { + if size <= 0 { + size = 1 + } + pool := &mcpToolClientPool{ + binPath: binPath, + maxUses: maxUses, + clients: make(chan indexToolClient, size), + all: make([]indexToolClient, 0, size), + uses: make(map[indexToolClient]int, size), + } + for i := 0; i < size; i++ { + client, err := newIndexToolClient(ctx, binPath) + if err != nil { + pool.Close() + return nil, fmt.Errorf("start indexer client %d/%d: %w", i+1, size, err) + } + pool.all = append(pool.all, client) + pool.uses[client] = 0 + pool.clients <- client + } + return pool, nil +} + +func (p *mcpToolClientPool) Close() { + for _, client := range p.all { + client.Close() + } +} + +func (p *mcpToolClientPool) borrow(ctx context.Context) (indexToolClient, error) { + select { + case client := <-p.clients: + return client, nil + case <-ctx.Done(): + return nil, ctx.Err() + } +} + +func (p *mcpToolClientPool) release(client indexToolClient) { + if client == nil { + return + } + p.clients <- client +} + +func (p *mcpToolClientPool) retire(client indexToolClient) { + if client == nil { + return + } + client.Close() + go 
p.replaceClientAsync(client) +} + +func (p *mcpToolClientPool) shouldRecycle(client indexToolClient) bool { + if p.maxUses <= 0 || client == nil { + return false + } + + p.mu.Lock() + defer p.mu.Unlock() + + next := p.uses[client] + 1 + p.uses[client] = next + return next >= p.maxUses +} + +func (p *mcpToolClientPool) CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) { + client, err := p.borrow(ctx) + if err != nil { + return nil, err + } + + type toolCallResult struct { + result *mcp.ToolResult + err error + } + + resultCh := make(chan toolCallResult, 1) + go func() { + result, err := client.CallTool(ctx, name, params) + resultCh <- toolCallResult{result: result, err: err} + }() + + select { + case out := <-resultCh: + if out.err != nil { + p.retire(client) + return nil, out.err + } + if p.shouldRecycle(client) { + p.retire(client) + } else { + p.release(client) + } + return out.result, out.err + case <-ctx.Done(): + p.retire(client) + return nil, ctx.Err() + } +} + +func (p *mcpToolClientPool) replaceClientAsync(dead indexToolClient) { + replacementCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + replacement, err := newIndexToolClient(replacementCtx, p.binPath) + if err != nil { + slog.Error("failed to replace timed out MCP client", "err", err) + return + } + + p.mu.Lock() + delete(p.uses, dead) + for i, client := range p.all { + if client == dead { + p.all[i] = replacement + break + } + } + p.uses[replacement] = 0 + p.mu.Unlock() + + p.release(replacement) +} + +type mcpIndexClientPool struct { + *mcpToolClientPool +} + +func newMCPIndexClientPool(ctx context.Context, binPath string, size int, maxUses int) (*mcpIndexClientPool, error) { + pool, err := newMCPToolClientPool(ctx, binPath, size, maxUses) + if err != nil { + return nil, err + } + return &mcpIndexClientPool{mcpToolClientPool: pool}, nil +} + +func (p *mcpIndexClientPool) IndexRepository(ctx context.Context, 
repoPath, mode, projectName string) error { + args := map[string]interface{}{ + "repo_path": repoPath, + "mode": mode, + } + if projectName != "" { + args["project"] = projectName + } + result, err := p.CallTool(ctx, "index_repository", args) + if err != nil { + return fmt.Errorf("index_repository: %w", err) + } + if result.IsError { + msg := "index_repository returned error" + if len(result.Content) > 0 { + msg = result.Content[0].Text + } + return fmt.Errorf("index_repository: %s", msg) + } + return nil +} + +type mcpDiscoveryClientPool struct { + *mcpToolClientPool +} + +func newMCPDiscoveryClientPool(ctx context.Context, binPath string, size int) (*mcpDiscoveryClientPool, error) { + pool, err := newMCPToolClientPool(ctx, binPath, size, 0) + if err != nil { + return nil, err + } + return &mcpDiscoveryClientPool{mcpToolClientPool: pool}, nil +} + +type bridgeClient interface { + ServerInfo() mcp.ServerInfo + Call(ctx context.Context, method string, params interface{}) (json.RawMessage, error) + CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) +} + +// mcpBridgeBackend implements bridge.Backend by forwarding to the MCP client. 
+type mcpBridgeBackend struct { + client bridgeClient + discovery discovery.Service + cache *bridge.SearchCache + cacheDir string // CBM cache dir where per-project .db files live +} + +func (b *mcpBridgeBackend) Call(ctx context.Context, method string, params json.RawMessage) (json.RawMessage, error) { + if b.client == nil { + return nil, bridge.ErrBackendUnavailable + } + + switch method { + case "initialize": + return b.initialize(params) + case "ping": + return json.RawMessage(`{}`), nil + case "tools/list": + // Hard timeout: tools/list should be fast + listCtx, cancel := context.WithTimeout(ctx, 10*time.Second) + defer cancel() + raw, err := b.client.Call(listCtx, "tools/list", nil) + if err != nil { + return nil, err + } + raw, err = b.appendDiscoveryTool(raw) + if err != nil { + return nil, err + } + return b.appendCustomerSurfaceTool(raw) + case "tools/call": + var paramMap map[string]interface{} + if len(params) > 0 { + if err := json.Unmarshal(params, ¶mMap); err != nil { + return nil, fmt.Errorf("parse params: %w", err) + } + } + + name, _ := paramMap["name"].(string) + if name == "" { + return nil, errors.New("missing tool name") + } + args, _ := paramMap["arguments"].(map[string]interface{}) + if name == discovery.NewDefinition().Name { + return b.callDiscoveryTool(ctx, args) + } + + // Cache check: return instantly for repeated identical queries. + cacheable := b.cache != nil && (name == "search_code" || name == "search_graph" || name == "get_code_snippet") + var cacheKey string + if cacheable { + cacheKey = b.cache.Key(name, args) + if cached, ok := b.cache.Get(cacheKey); ok { + slog.Debug("search cache hit", "tool", name) + return cached, nil + } + } + + // ── Go-native search_code ── + // Bypass C binary entirely for search_code. The C binary runs `grep -rn` + // on GCS Fuse-mounted repos which is catastrophically slow (minutes for + // 63K-file repos). Our Go path: + // 1. Queries SQLite for the pre-indexed file list (no filesystem walk) + // 2. 
Reads files in parallel (64 goroutines — saturates GCS Fuse) + // 3. Runs Go regexp (same semantics as grep -E) + // 4. Classifies matches against indexed nodes (same output as C binary) + // Full grep accuracy, never hangs, hard 30s deadline. + if name == "search_code" { + goSearchCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + raw, goErr := b.runGoSearchCode(goSearchCtx, args) + if goErr == nil { + if cacheable { + b.cache.Set(cacheKey, raw) + } + return raw, nil + } + // Log and fall through to C binary as safety net. + slog.Warn("go-native search_code failed, falling back to C binary", "err", goErr) + } + + // ── Go-native customer_surface ── + // Composite enricher: fuses product-area + Vue metadata + FE fetch-call + // extraction into one record per file. No C-binary fallback — this tool + // is Go-only by design (the C binary has no concept of product areas + // or Vue SFC structure). + if name == "customer_surface" { + goCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + return b.runGoCustomerSurface(goCtx, args) + } + + // Hard timeout on every C binary tool call. Without this, hung C + // binaries block the bridge client forever because bufio.Scanner.Scan() + // doesn't respect context cancellation. + toolCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + result, err := b.client.CallTool(toolCtx, name, args) + if err != nil { + return nil, err + } + + raw, err := json.Marshal(result) + if err != nil { + return nil, err + } + + // Cache successful (non-error) results only. 
+ if cacheable && !result.IsError { + b.cache.Set(cacheKey, raw) + } + + return raw, nil + default: + return nil, bridge.ErrMethodNotFound + } +} + +func (b *mcpBridgeBackend) appendDiscoveryTool(raw json.RawMessage) (json.RawMessage, error) { + if b.discovery == nil { + return raw, nil + } + + var payload struct { + Tools []map[string]interface{} `json:"tools"` + } + if err := json.Unmarshal(raw, &payload); err != nil { + return nil, fmt.Errorf("parse tools/list response: %w", err) + } + + def := b.discovery.Definition() + tool := map[string]interface{}{ + "name": def.Name, + "description": def.Description, + "inputSchema": def.InputSchema, + } + payload.Tools = append(payload.Tools, tool) + return json.Marshal(payload) +} + +func (b *mcpBridgeBackend) callDiscoveryTool(ctx context.Context, args map[string]interface{}) (json.RawMessage, error) { + if b.discovery == nil { + return nil, errors.New("discover_projects unavailable") + } + + var req discovery.Request + if args != nil { + rawArgs, err := json.Marshal(args) + if err != nil { + return nil, fmt.Errorf("marshal discover_projects args: %w", err) + } + if err := json.Unmarshal(rawArgs, &req); err != nil { + return nil, fmt.Errorf("parse discover_projects args: %w", err) + } + } + req.Query = strings.TrimSpace(req.Query) + if req.Query == "" { + return nil, errors.New("discover_projects: query is required") + } + if req.Limit <= 0 { + req.Limit = 5 + } + if _, ok := args["include_graph_confidence"]; !ok { + req.IncludeGraphConfidence = true + } + + resp, err := b.discovery.DiscoverProjects(ctx, req) + if err != nil { + return nil, err + } + text, err := json.Marshal(resp) + if err != nil { + return nil, fmt.Errorf("marshal discover_projects response: %w", err) + } + + return json.Marshal(mcp.ToolResult{ + Content: []mcp.Content{{Type: "text", Text: string(text)}}, + IsError: false, + }) +} + +// runGoSearchCode executes search_code entirely in Go — bypasses the C binary. 
+// See searchtools package for architecture details. +func (b *mcpBridgeBackend) runGoSearchCode(ctx context.Context, args map[string]interface{}) (json.RawMessage, error) { + if b.cacheDir == "" { + return nil, errors.New("cache dir not configured") + } + + // Unmarshal args. + raw, err := json.Marshal(args) + if err != nil { + return nil, fmt.Errorf("marshal args: %w", err) + } + var sargs searchtools.SearchCodeArgs + if err := json.Unmarshal(raw, &sargs); err != nil { + return nil, fmt.Errorf("parse args: %w", err) + } + + result, err := searchtools.HandleSearchCode(ctx, b.cacheDir, sargs) + if err != nil { + return nil, err + } + + // Format as MCP ToolResult (same shape as C binary output). + body, err := json.Marshal(result) + if err != nil { + return nil, fmt.Errorf("marshal search result: %w", err) + } + return json.Marshal(mcp.ToolResult{ + Content: []mcp.Content{{Type: "text", Text: string(body)}}, + IsError: false, + }) +} + +// runGoCustomerSurface executes the customer-surface composite enricher. +// Pure compute path — sources are passed inline by the caller, so no SQLite, +// no GCS Fuse, no filesystem walk. Hot path latency is dominated by regex +// work across the batched files (single-digit ms per file on typical SFCs). 
+func (b *mcpBridgeBackend) runGoCustomerSurface(ctx context.Context, args map[string]interface{}) (json.RawMessage, error) { + raw, err := json.Marshal(args) + if err != nil { + return nil, fmt.Errorf("marshal args: %w", err) + } + var sargs searchtools.CustomerSurfaceArgs + if err := json.Unmarshal(raw, &sargs); err != nil { + return nil, fmt.Errorf("parse args: %w", err) + } + + result, err := searchtools.HandleCustomerSurface(ctx, sargs) + if err != nil { + return nil, err + } + + body, err := json.Marshal(result) + if err != nil { + return nil, fmt.Errorf("marshal customer-surface result: %w", err) + } + return json.Marshal(mcp.ToolResult{ + Content: []mcp.Content{{Type: "text", Text: string(body)}}, + IsError: false, + }) +} + +// appendCustomerSurfaceTool advertises the customer_surface wrapper-owned +// tool on `tools/list`. The C binary knows nothing about it, so we have to +// inject the definition into the list response before returning to the client. +func (b *mcpBridgeBackend) appendCustomerSurfaceTool(raw json.RawMessage) (json.RawMessage, error) { + var payload struct { + Tools []map[string]interface{} `json:"tools"` + } + if err := json.Unmarshal(raw, &payload); err != nil { + return nil, fmt.Errorf("parse tools/list response: %w", err) + } + payload.Tools = append(payload.Tools, customerSurfaceToolDefinition()) + return json.Marshal(payload) +} + +// customerSurfaceToolDefinition is the MCP JSON-Schema for the customer_surface +// tool. Kept here (not in the searchtools package) to avoid circular imports +// and to mirror the discovery tool registration pattern. +func customerSurfaceToolDefinition() map[string]interface{} { + return map[string]interface{}{ + "name": "customer_surface", + "description": "Fuse product area + Vue component metadata + frontend fetch calls into a " + + "per-file customer-surface record. Used by PR-impact analysis to map changed files " + + "to product areas, pages, and UI components. 
Pass the repo slug and a list of " + + "(path, source) pairs; returns one surface record per file in input order.", + "inputSchema": map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "repo": map[string]interface{}{ + "type": "string", + "description": "Short repo slug used for product-map lookup (e.g. 'ghl-crm-frontend', 'platform-backend').", + }, + "files": map[string]interface{}{ + "type": "array", + "description": "Files to analyze. Each entry carries a repo-root-relative path and the full source.", + "items": map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{"type": "string"}, + "source": map[string]interface{}{"type": "string"}, + }, + "required": []string{"path", "source"}, + }, + }, + "product_map_path": map[string]interface{}{ + "type": "string", + "description": "Optional filesystem override of the embedded product map (local dev / tests only).", + }, + }, + "required": []string{"repo", "files"}, + }, + } +} + +func (b *mcpBridgeBackend) initialize(params json.RawMessage) (json.RawMessage, error) { + type initializeParams struct { + ProtocolVersion string `json:"protocolVersion"` + } + type initializeResult struct { + ProtocolVersion string `json:"protocolVersion"` + Capabilities map[string]interface{} `json:"capabilities"` + ServerInfo mcp.ServerInfo `json:"serverInfo"` + } + + version := supportedProtocolVersions[0] + if len(params) > 0 { + var p initializeParams + if err := json.Unmarshal(params, &p); err != nil { + return nil, fmt.Errorf("parse initialize params: %w", err) + } + for _, supported := range supportedProtocolVersions { + if p.ProtocolVersion == supported { + version = supported + break + } + } + } + + return json.Marshal(initializeResult{ + ProtocolVersion: version, + Capabilities: map[string]interface{}{ + "tools": map[string]interface{}{}, + }, + ServerInfo: b.client.ServerInfo(), + }) +} diff --git a/ghl/cmd/server/main_test.go 
b/ghl/cmd/server/main_test.go new file mode 100644 index 00000000..38193e49 --- /dev/null +++ b/ghl/cmd/server/main_test.go @@ -0,0 +1,797 @@
package main

import (
	"context"
	"encoding/json"
	"errors"
	"net/http"
	"net/http/httptest"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"sync/atomic"
	"testing"
	"time"

	"github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/bridge"
	"github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/discovery"
	"github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/mcp"
)

// fakeRequestAuthenticator is a test double for the request authenticator:
// it accepts exactly one bearer token and counts how often it is consulted.
type fakeRequestAuthenticator struct {
	token string // the only token Authenticate accepts
	calls int    // number of Authenticate invocations observed
}

// Authenticate records the call and rejects any token other than f.token.
func (f *fakeRequestAuthenticator) Authenticate(_ context.Context, bearerToken string) error {
	f.calls++
	if bearerToken != f.token {
		return errors.New("unauthorized")
	}
	return nil
}

// fakeBridgeClient is a scripted bridge client: it captures the arguments of
// the most recent Call/CallTool invocation and replies with canned results.
type fakeBridgeClient struct {
	info       mcp.ServerInfo         // returned by ServerInfo
	callCtx    context.Context        // last ctx passed to Call
	callMethod string                 // last method passed to Call
	callParams interface{}            // last params passed to Call
	callResult json.RawMessage        // canned Call result
	callErr    error                  // canned Call error
	toolCtx    context.Context        // last ctx passed to CallTool
	toolName   string                 // last tool name passed to CallTool
	toolArgs   map[string]interface{} // last args passed to CallTool
	toolResult *mcp.ToolResult        // canned CallTool result
	toolErr    error                  // canned CallTool error
}

// ServerInfo returns the canned server identity.
func (f *fakeBridgeClient) ServerInfo() mcp.ServerInfo {
	return f.info
}

// Call records the request for later assertions and replies with the
// scripted result/error pair.
func (f *fakeBridgeClient) Call(ctx context.Context, method string, params interface{}) (json.RawMessage, error) {
	f.callCtx = ctx
	f.callMethod = method
	f.callParams = params
	return f.callResult, f.callErr
}

// CallTool records the tool invocation and replies with the scripted
// result/error pair.
func (f *fakeBridgeClient) CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) {
	f.toolCtx = ctx
	f.toolName = name
	f.toolArgs = params
	return f.toolResult, f.toolErr
}

// fakeDiscoverer is a scripted discovery service: it records the last
// request it received and returns a canned response/error.
type fakeDiscoverer struct {
	definition discovery.ToolDefinition // returned by Definition
	request    discovery.Request        // last request seen by DiscoverProjects
	response   discovery.Response       // canned DiscoverProjects response
	err        error                    // canned DiscoverProjects error
}

// Definition returns the canned tool definition.
func (f *fakeDiscoverer) Definition() discovery.ToolDefinition {
	return f.definition
}

// DiscoverProjects records the request for later assertions.
func (f *fakeDiscoverer) DiscoverProjects(_ context.Context, req discovery.Request) (discovery.Response, error) {
	f.request = req
return f.response, f.err +} + +func TestMCPBridgeBackendInitializeNegotiatesProtocol(t *testing.T) { + backend := &mcpBridgeBackend{ + client: &fakeBridgeClient{ + info: mcp.ServerInfo{Name: "codebase-memory-mcp", Version: "0.10.0"}, + }, + } + + raw, err := backend.Call(context.Background(), "initialize", json.RawMessage(`{"protocolVersion":"2025-03-26"}`)) + if err != nil { + t.Fatalf("initialize: %v", err) + } + + var result struct { + ProtocolVersion string `json:"protocolVersion"` + Capabilities map[string]interface{} `json:"capabilities"` + ServerInfo mcp.ServerInfo `json:"serverInfo"` + } + if err := json.Unmarshal(raw, &result); err != nil { + t.Fatalf("parse initialize result: %v", err) + } + + if result.ProtocolVersion != "2025-03-26" { + t.Errorf("protocolVersion: want 2025-03-26, got %q", result.ProtocolVersion) + } + if result.ServerInfo.Version != "0.10.0" { + t.Errorf("server version: want 0.10.0, got %q", result.ServerInfo.Version) + } + if _, ok := result.Capabilities["tools"]; !ok { + t.Errorf("capabilities.tools: expected tools capability") + } +} + +func TestMCPBridgeBackendForwardsToolsList(t *testing.T) { + client := &fakeBridgeClient{ + callResult: json.RawMessage(`{"tools":[{"name":"list_projects"}]}`), + } + backend := &mcpBridgeBackend{client: client} + + raw, err := backend.Call(context.Background(), "tools/list", nil) + if err != nil { + t.Fatalf("tools/list: %v", err) + } + + if client.callMethod != "tools/list" { + t.Errorf("call method: want tools/list, got %q", client.callMethod) + } + if client.callCtx == nil { + t.Error("call ctx: expected non-nil context") + } + // The bridge always appends the wrapper-owned customer_surface tool even + // when no discovery service is configured. Confirm the upstream tool is + // still present and the injected tool is appended after it. 
+ var result struct { + Tools []struct { + Name string `json:"name"` + } `json:"tools"` + } + if err := json.Unmarshal(raw, &result); err != nil { + t.Fatalf("parse tools/list result: %v", err) + } + if len(result.Tools) != 2 { + t.Fatalf("tools count: want 2 (list_projects + customer_surface), got %d — raw=%s", len(result.Tools), raw) + } + if result.Tools[0].Name != "list_projects" { + t.Errorf("first tool: want list_projects, got %q", result.Tools[0].Name) + } + if result.Tools[1].Name != "customer_surface" { + t.Errorf("second tool: want customer_surface, got %q", result.Tools[1].Name) + } +} + +func TestMCPBridgeBackendToolsListIncludesDiscoverProjects(t *testing.T) { + client := &fakeBridgeClient{ + callResult: json.RawMessage(`{"tools":[{"name":"list_projects"}]}`), + } + backend := &mcpBridgeBackend{ + client: client, + discovery: &fakeDiscoverer{ + definition: discovery.ToolDefinition{ + Name: "discover_projects", + Description: "Discover likely repos", + InputSchema: map[string]interface{}{"type": "object"}, + }, + }, + } + + raw, err := backend.Call(context.Background(), "tools/list", nil) + if err != nil { + t.Fatalf("tools/list: %v", err) + } + + var result struct { + Tools []struct { + Name string `json:"name"` + } `json:"tools"` + } + if err := json.Unmarshal(raw, &result); err != nil { + t.Fatalf("parse tools/list result: %v", err) + } + + // Expected order: upstream tools, then wrapper-owned discover_projects, + // then wrapper-owned customer_surface. 
+ if len(result.Tools) != 3 { + t.Fatalf("tools count: want 3 (list_projects + discover_projects + customer_surface), got %d", len(result.Tools)) + } + if result.Tools[0].Name != "list_projects" { + t.Fatalf("first tool: want list_projects, got %q", result.Tools[0].Name) + } + if result.Tools[1].Name != "discover_projects" { + t.Fatalf("second tool: want discover_projects, got %q", result.Tools[1].Name) + } + if result.Tools[2].Name != "customer_surface" { + t.Fatalf("third tool: want customer_surface, got %q", result.Tools[2].Name) + } +} + +func TestMCPBridgeBackendForwardsToolsCall(t *testing.T) { + client := &fakeBridgeClient{ + toolResult: &mcp.ToolResult{ + Content: []mcp.Content{{Type: "text", Text: "ok"}}, + }, + } + backend := &mcpBridgeBackend{client: client} + + raw, err := backend.Call(context.Background(), "tools/call", json.RawMessage(`{"name":"list_projects","arguments":{"project":"demo"}}`)) + if err != nil { + t.Fatalf("tools/call: %v", err) + } + + if client.toolName != "list_projects" { + t.Errorf("tool name: want list_projects, got %q", client.toolName) + } + if client.toolCtx == nil { + t.Error("tool ctx: expected non-nil context") + } + if got := client.toolArgs["project"]; got != "demo" { + t.Errorf("tool args.project: want demo, got %v", got) + } + if string(raw) != `{"content":[{"type":"text","text":"ok"}],"isError":false}` { + t.Errorf("raw result: got %s", raw) + } +} + +func TestMCPBridgeBackendHandlesDiscoverProjects(t *testing.T) { + backend := &mcpBridgeBackend{ + client: &fakeBridgeClient{}, + discovery: &fakeDiscoverer{ + response: discovery.Response{ + Query: "membership checkout lock", + PrimaryRepos: []discovery.Candidate{ + {Project: "app-fleet-cache-membership-backend", RepoSlug: "membership-backend"}, + }, + }, + }, + } + + raw, err := backend.Call(context.Background(), "tools/call", json.RawMessage(`{"name":"discover_projects","arguments":{"query":"membership checkout lock","limit":3}}`)) + if err != nil { + 
t.Fatalf("tools/call discover_projects: %v", err) + } + + var result struct { + Content []struct { + Type string `json:"type"` + Text string `json:"text"` + } `json:"content"` + IsError bool `json:"isError"` + } + if err := json.Unmarshal(raw, &result); err != nil { + t.Fatalf("parse discover_projects result: %v", err) + } + if result.IsError { + t.Fatal("discover_projects result unexpectedly marked as error") + } + if len(result.Content) != 1 { + t.Fatalf("content count: want 1, got %d", len(result.Content)) + } + + var payload discovery.Response + if err := json.Unmarshal([]byte(result.Content[0].Text), &payload); err != nil { + t.Fatalf("parse discover_projects payload: %v", err) + } + if payload.Query != "membership checkout lock" { + t.Fatalf("query: want %q, got %q", "membership checkout lock", payload.Query) + } + if len(payload.PrimaryRepos) != 1 || payload.PrimaryRepos[0].RepoSlug != "membership-backend" { + t.Fatalf("unexpected primary repos: %+v", payload.PrimaryRepos) + } +} + +func TestMCPBridgeBackendRejectsUnknownMethod(t *testing.T) { + backend := &mcpBridgeBackend{client: &fakeBridgeClient{}} + + _, err := backend.Call(context.Background(), "resources/list", nil) + if err == nil { + t.Fatal("expected error for unknown method") + } + if err != bridge.ErrMethodNotFound { + t.Fatalf("want ErrMethodNotFound, got %v", err) + } +} + +func TestMakeAuthMiddlewareUsesAuthenticatorWhenConfigured(t *testing.T) { + auth := &fakeRequestAuthenticator{token: "ghp-valid"} + handler := makeAuthMiddleware("legacy-token", auth)(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusAccepted) + }) + + req := httptest.NewRequest(http.MethodGet, "/status", nil) + req.Header.Set("Authorization", "Bearer ghp-valid") + rr := httptest.NewRecorder() + handler(rr, req) + + if rr.Code != http.StatusAccepted { + t.Fatalf("status: want %d, got %d", http.StatusAccepted, rr.Code) + } + if auth.calls != 1 { + t.Fatalf("auth calls: want 1, got %d", auth.calls) + } 
+} + +func TestMakeAuthMiddlewareRejectsLegacyBearerWhenAuthenticatorConfigured(t *testing.T) { + auth := &fakeRequestAuthenticator{token: "ghp-valid"} + handler := makeAuthMiddleware("legacy-token", auth)(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusAccepted) + }) + + req := httptest.NewRequest(http.MethodGet, "/status", nil) + req.Header.Set("Authorization", "Bearer legacy-token") + rr := httptest.NewRecorder() + handler(rr, req) + + if rr.Code != http.StatusUnauthorized { + t.Fatalf("status: want %d, got %d", http.StatusUnauthorized, rr.Code) + } + if auth.calls != 1 { + t.Fatalf("auth calls: want 1, got %d", auth.calls) + } +} + +func TestMakeAuthMiddlewareFallsBackToStaticBearerToken(t *testing.T) { + handler := makeAuthMiddleware("legacy-token", nil)(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusAccepted) + }) + + req := httptest.NewRequest(http.MethodGet, "/status", nil) + req.Header.Set("Authorization", "Bearer legacy-token") + rr := httptest.NewRecorder() + handler(rr, req) + + if rr.Code != http.StatusAccepted { + t.Fatalf("status: want %d, got %d", http.StatusAccepted, rr.Code) + } +} + +type fakeIndexToolClient struct { + inFlight *atomic.Int64 + maxFlight *atomic.Int64 + delay time.Duration + toolErr error + result *mcp.ToolResult +} + +func (f *fakeIndexToolClient) CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) { + if name != "index_repository" { + return nil, errors.New("unexpected tool") + } + current := f.inFlight.Add(1) + for { + old := f.maxFlight.Load() + if current <= old || f.maxFlight.CompareAndSwap(old, current) { + break + } + } + defer f.inFlight.Add(-1) + + if f.delay > 0 { + select { + case <-time.After(f.delay): + case <-ctx.Done(): + return nil, ctx.Err() + } + } + if f.toolErr != nil { + return nil, f.toolErr + } + if f.result != nil { + return f.result, nil + } + return &mcp.ToolResult{}, nil +} + +func (f 
*fakeIndexToolClient) Close() {} + +type blockingToolClient struct { + started chan struct{} + closed chan struct{} + once sync.Once +} + +func newBlockingToolClient() *blockingToolClient { + return &blockingToolClient{ + started: make(chan struct{}), + closed: make(chan struct{}), + } +} + +func (f *blockingToolClient) CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) { + close(f.started) + select { + case <-f.closed: + return nil, context.DeadlineExceeded + case <-ctx.Done(): + return nil, ctx.Err() + } +} + +func (f *blockingToolClient) Close() { + f.once.Do(func() { + close(f.closed) + }) +} + +type fastToolClient struct { + result *mcp.ToolResult +} + +func (f *fastToolClient) CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) { + if f.result != nil { + return f.result, nil + } + return &mcp.ToolResult{}, nil +} + +func (f *fastToolClient) Close() {} + +type failingToolClient struct { + err error +} + +func (f *failingToolClient) CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) { + return nil, f.err +} + +func (f *failingToolClient) Close() {} + +type blockingBridgeClient struct { + info mcp.ServerInfo + started chan struct{} + once sync.Once +} + +func newBlockingBridgeClient() *blockingBridgeClient { + return &blockingBridgeClient{ + info: mcp.ServerInfo{Name: "codebase-memory-mcp", Version: "test"}, + started: make(chan struct{}), + } +} + +func (f *blockingBridgeClient) ServerInfo() mcp.ServerInfo { + return f.info +} + +func (f *blockingBridgeClient) Call(ctx context.Context, method string, params interface{}) (json.RawMessage, error) { + f.once.Do(func() { close(f.started) }) + <-ctx.Done() + return nil, ctx.Err() +} + +func (f *blockingBridgeClient) CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) { + f.once.Do(func() { close(f.started) }) + <-ctx.Done() + 
return nil, ctx.Err() +} + +func (f *blockingBridgeClient) Close() {} + +type fastBridgeClient struct { + info mcp.ServerInfo + result json.RawMessage +} + +func (f *fastBridgeClient) ServerInfo() mcp.ServerInfo { + return f.info +} + +func (f *fastBridgeClient) Call(ctx context.Context, method string, params interface{}) (json.RawMessage, error) { + if f.result != nil { + return f.result, nil + } + return json.RawMessage(`{}`), nil +} + +func (f *fastBridgeClient) CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) { + return &mcp.ToolResult{}, nil +} + +func (f *fastBridgeClient) Close() {} + +func TestMCPIndexClientPoolRunsConcurrentIndexing(t *testing.T) { + var inFlight atomic.Int64 + var maxFlight atomic.Int64 + + prevFactory := newIndexToolClient + newIndexToolClient = func(ctx context.Context, binPath string) (indexToolClient, error) { + return &fakeIndexToolClient{ + inFlight: &inFlight, + maxFlight: &maxFlight, + delay: 20 * time.Millisecond, + }, nil + } + defer func() { newIndexToolClient = prevFactory }() + + pool, err := newMCPIndexClientPool(context.Background(), "/tmp/cbm", 3, 0) + if err != nil { + t.Fatalf("newMCPIndexClientPool: %v", err) + } + defer pool.Close() + + errCh := make(chan error, 6) + for i := 0; i < 6; i++ { + go func() { + errCh <- pool.IndexRepository(context.Background(), "/tmp/repo", "moderate", "") + }() + } + for i := 0; i < 6; i++ { + if err := <-errCh; err != nil { + t.Fatalf("IndexRepository: %v", err) + } + } + + if got := maxFlight.Load(); got < 2 { + t.Fatalf("max concurrent workers: want >= 2, got %d", got) + } + if got := maxFlight.Load(); got > 3 { + t.Fatalf("max concurrent workers: want <= 3, got %d", got) + } +} + +func TestMCPIndexClientPoolPropagatesToolErrors(t *testing.T) { + prevFactory := newIndexToolClient + newIndexToolClient = func(ctx context.Context, binPath string) (indexToolClient, error) { + return &fakeIndexToolClient{ + inFlight: &atomic.Int64{}, + 
maxFlight: &atomic.Int64{}, + result: &mcp.ToolResult{ + IsError: true, + Content: []mcp.Content{{Type: "text", Text: "bad repo"}}, + }, + }, nil + } + defer func() { newIndexToolClient = prevFactory }() + + pool, err := newMCPIndexClientPool(context.Background(), "/tmp/cbm", 1, 0) + if err != nil { + t.Fatalf("newMCPIndexClientPool: %v", err) + } + defer pool.Close() + + err = pool.IndexRepository(context.Background(), "/tmp/repo", "full", "") + if err == nil { + t.Fatal("expected tool error") + } + if got := err.Error(); got != "index_repository: bad repo" { + t.Fatalf("unexpected error: %s", got) + } +} + +func TestMCPToolClientPoolReplacesTimedOutClient(t *testing.T) { + blocking := newBlockingToolClient() + replacement := &fastToolClient{ + result: &mcp.ToolResult{Content: []mcp.Content{{Type: "text", Text: "ok"}}}, + } + + var factoryCalls atomic.Int64 + prevFactory := newIndexToolClient + newIndexToolClient = func(ctx context.Context, binPath string) (indexToolClient, error) { + switch factoryCalls.Add(1) { + case 1: + return blocking, nil + case 2: + return replacement, nil + default: + return &fastToolClient{ + result: &mcp.ToolResult{Content: []mcp.Content{{Type: "text", Text: "ok"}}}, + }, nil + } + } + defer func() { newIndexToolClient = prevFactory }() + + pool, err := newMCPToolClientPool(context.Background(), "/tmp/cbm", 1, 0) + if err != nil { + t.Fatalf("newMCPToolClientPool: %v", err) + } + defer pool.Close() + + select { + case <-blocking.started: + default: + } + + ctx, cancel := context.WithTimeout(context.Background(), 20*time.Millisecond) + defer cancel() + start := time.Now() + _, err = pool.CallTool(ctx, "search_graph", map[string]interface{}{"project": "demo"}) + if !errors.Is(err, context.DeadlineExceeded) { + t.Fatalf("expected context deadline exceeded, got %v", err) + } + if elapsed := time.Since(start); elapsed > 500*time.Millisecond { + t.Fatalf("timed out call returned too slowly: %s", elapsed) + } + + result, err := 
pool.CallTool(context.Background(), "search_graph", map[string]interface{}{"project": "demo"}) + if err != nil { + t.Fatalf("replacement client call failed: %v", err) + } + if len(result.Content) != 1 || result.Content[0].Text != "ok" { + t.Fatalf("unexpected replacement result: %+v", result) + } + if got := factoryCalls.Load(); got < 2 { + t.Fatalf("expected replacement factory call, got %d", got) + } +} + +func TestMCPToolClientPoolReplacesErroredClient(t *testing.T) { + failing := &failingToolClient{err: errors.New("write |1: broken pipe")} + replacement := &fastToolClient{ + result: &mcp.ToolResult{Content: []mcp.Content{{Type: "text", Text: "ok"}}}, + } + + var factoryCalls atomic.Int64 + prevFactory := newIndexToolClient + newIndexToolClient = func(ctx context.Context, binPath string) (indexToolClient, error) { + switch factoryCalls.Add(1) { + case 1: + return failing, nil + case 2: + return replacement, nil + default: + return &fastToolClient{ + result: &mcp.ToolResult{Content: []mcp.Content{{Type: "text", Text: "ok"}}}, + }, nil + } + } + defer func() { newIndexToolClient = prevFactory }() + + pool, err := newMCPToolClientPool(context.Background(), "/tmp/cbm", 1, 0) + if err != nil { + t.Fatalf("newMCPToolClientPool: %v", err) + } + defer pool.Close() + + _, err = pool.CallTool(context.Background(), "index_repository", map[string]interface{}{"repo_path": "/tmp/repo"}) + if err == nil || !strings.Contains(err.Error(), "broken pipe") { + t.Fatalf("expected broken pipe error, got %v", err) + } + + result, err := pool.CallTool(context.Background(), "index_repository", map[string]interface{}{"repo_path": "/tmp/repo"}) + if err != nil { + t.Fatalf("replacement client call failed: %v", err) + } + if len(result.Content) != 1 || result.Content[0].Text != "ok" { + t.Fatalf("unexpected replacement result: %+v", result) + } + if got := factoryCalls.Load(); got < 2 { + t.Fatalf("expected replacement factory call, got %d", got) + } +} + +func 
TestMCPToolClientPoolRecyclesClientAfterMaxUses(t *testing.T) { + var factoryCalls atomic.Int64 + prevFactory := newIndexToolClient + newIndexToolClient = func(ctx context.Context, binPath string) (indexToolClient, error) { + switch factoryCalls.Add(1) { + case 1: + return &fastToolClient{ + result: &mcp.ToolResult{Content: []mcp.Content{{Type: "text", Text: "first"}}}, + }, nil + default: + return &fastToolClient{ + result: &mcp.ToolResult{Content: []mcp.Content{{Type: "text", Text: "second"}}}, + }, nil + } + } + defer func() { newIndexToolClient = prevFactory }() + + pool, err := newMCPToolClientPool(context.Background(), "/tmp/cbm", 1, 1) + if err != nil { + t.Fatalf("newMCPToolClientPool: %v", err) + } + defer pool.Close() + + first, err := pool.CallTool(context.Background(), "index_repository", map[string]interface{}{"repo_path": "/tmp/repo"}) + if err != nil { + t.Fatalf("first CallTool: %v", err) + } + if len(first.Content) != 1 || first.Content[0].Text != "first" { + t.Fatalf("unexpected first result: %+v", first) + } + + second, err := pool.CallTool(context.Background(), "index_repository", map[string]interface{}{"repo_path": "/tmp/repo"}) + if err != nil { + t.Fatalf("second CallTool: %v", err) + } + if len(second.Content) != 1 || second.Content[0].Text != "second" { + t.Fatalf("unexpected second result: %+v", second) + } + if got := factoryCalls.Load(); got < 2 { + t.Fatalf("expected recycled client, factory calls=%d", got) + } +} + +func TestProjectNameFromPath(t *testing.T) { + cases := map[string]string{ + "/tmp/fleet-cache/platform-backend": "tmp-fleet-cache-platform-backend", + "/tmp//fleet-cache//platform-backend/": "tmp-fleet-cache-platform-backend", + "C:/tmp/fleet-cache/platform-backend": "C-tmp-fleet-cache-platform-backend", + "": "root", + "/": "root", + } + + for input, want := range cases { + if got := projectNameFromPath(input); got != want { + t.Fatalf("projectNameFromPath(%q): want %q, got %q", input, want, got) + } + } +} + +func 
TestMCPBridgeClientPoolReturnsBusyWhenAcquireTimesOut(t *testing.T) {
	blocking := newBlockingBridgeClient()

	// Swap in a factory that always hands out the blocking client; restore
	// the real factory when the test ends.
	prevFactory := newBridgePoolClient
	newBridgePoolClient = func(ctx context.Context, binPath string) (bridgePoolClient, error) {
		return blocking, nil
	}
	defer func() { newBridgePoolClient = prevFactory }()

	// Pool of size 1 with a 10ms acquire timeout: a second caller must get
	// ErrBackendBusy quickly instead of queueing behind the blocked client.
	pool, err := newMCPBridgeClientPool(context.Background(), "/tmp/cbm", 1, 10*time.Millisecond)
	if err != nil {
		t.Fatalf("newMCPBridgeClientPool: %v", err)
	}
	defer pool.Close()

	firstCtx, firstCancel := context.WithCancel(context.Background())
	defer firstCancel()

	// Occupy the pool's only client with a call that blocks until canceled.
	errCh := make(chan error, 1)
	go func() {
		_, callErr := pool.Call(firstCtx, "tools/list", nil)
		errCh <- callErr
	}()

	// Wait until the blocking call has actually started before racing it.
	select {
	case <-blocking.started:
	case <-time.After(time.Second):
		t.Fatal("first bridge call did not start")
	}

	// The second call must fail fast with ErrBackendBusy, not hang.
	start := time.Now()
	_, err = pool.Call(context.Background(), "tools/list", nil)
	if !errors.Is(err, bridge.ErrBackendBusy) {
		t.Fatalf("expected ErrBackendBusy, got %v", err)
	}
	if elapsed := time.Since(start); elapsed > 500*time.Millisecond {
		t.Fatalf("busy call returned too slowly: %s", elapsed)
	}

	// Releasing the first caller should surface context.Canceled to it.
	firstCancel()
	if callErr := <-errCh; !errors.Is(callErr, context.Canceled) {
		t.Fatalf("expected first call to be canceled, got %v", callErr)
	}
}

// TestIsGitHubHTTPSAuthError checks the classifier against a real git
// credential-prompt failure message and an unrelated git failure.
func TestIsGitHubHTTPSAuthError(t *testing.T) {
	if !isGitHubHTTPSAuthError("fatal: could not read Username for 'https://github.com': No such device or address") {
		t.Fatal("expected GitHub HTTPS auth error to be detected")
	}
	if isGitHubHTTPSAuthError("fatal: some other git failure") {
		t.Fatal("unexpected auth error match")
	}
}

// TestHasWorkingTreeFilesRejectsGitOnlyClone verifies that a directory
// containing only a .git directory does not count as a checked-out tree.
func TestHasWorkingTreeFilesRejectsGitOnlyClone(t *testing.T) {
	root := t.TempDir()
	if err := os.Mkdir(filepath.Join(root, ".git"), 0o755); err != nil {
		t.Fatalf("mkdir .git: %v", err)
	}

	ok, err := hasWorkingTreeFiles(root)
	if err != nil {
		t.Fatalf("hasWorkingTreeFiles: %v", err)
	}
	if ok {
t.Fatal("expected git-only directory to be rejected") + } +} + +func TestHasWorkingTreeFilesAcceptsCheckedOutFile(t *testing.T) { + root := t.TempDir() + if err := os.Mkdir(filepath.Join(root, ".git"), 0o755); err != nil { + t.Fatalf("mkdir .git: %v", err) + } + if err := os.WriteFile(filepath.Join(root, "package.json"), []byte("{}"), 0o644); err != nil { + t.Fatalf("write package.json: %v", err) + } + + ok, err := hasWorkingTreeFiles(root) + if err != nil { + t.Fatalf("hasWorkingTreeFiles: %v", err) + } + if !ok { + t.Fatal("expected checked out file to be accepted") + } +} + diff --git a/ghl/go.mod b/ghl/go.mod new file mode 100644 index 00000000..ba50de06 --- /dev/null +++ b/ghl/go.mod @@ -0,0 +1,67 @@ +module github.com/GoHighLevel/codebase-memory-mcp/ghl + +go 1.25.0 + +require ( + cloud.google.com/go/storage v1.62.1 + github.com/go-chi/chi/v5 v5.2.5 + github.com/robfig/cron/v3 v3.0.1 + golang.org/x/sync v0.20.0 + google.golang.org/api v0.276.0 + gopkg.in/yaml.v3 v3.0.1 + modernc.org/sqlite v1.49.0 +) + +require ( + cel.dev/expr v0.25.1 // indirect + cloud.google.com/go v0.123.0 // indirect + cloud.google.com/go/auth v0.20.0 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/compute/metadata v0.9.0 // indirect + cloud.google.com/go/iam v1.7.0 // indirect + cloud.google.com/go/monitoring v1.24.3 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.31.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.55.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.55.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cncf/xds/go v0.0.0-20251210132809-ee656c7534f5 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/envoyproxy/go-control-plane/envoy v1.36.0 // indirect + github.com/envoyproxy/protoc-gen-validate v1.3.0 // indirect + 
github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/go-jose/go-jose/v4 v4.1.4 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/google/s2a-go v0.1.9 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.14 // indirect + github.com/googleapis/gax-go/v2 v2.21.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/ncruces/go-strftime v1.0.0 // indirect + github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect + github.com/spiffe/go-spiffe/v2 v2.6.0 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect + go.opentelemetry.io/contrib/detectors/gcp v1.39.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.67.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 // indirect + go.opentelemetry.io/otel v1.43.0 // indirect + go.opentelemetry.io/otel/metric v1.43.0 // indirect + go.opentelemetry.io/otel/sdk v1.43.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.43.0 // indirect + go.opentelemetry.io/otel/trace v1.43.0 // indirect + golang.org/x/crypto v0.49.0 // indirect + golang.org/x/net v0.52.0 // indirect + golang.org/x/oauth2 v0.36.0 // indirect + golang.org/x/sys v0.42.0 // indirect + golang.org/x/text v0.35.0 // indirect + golang.org/x/time v0.15.0 // indirect + google.golang.org/genproto v0.0.0-20260319201613-d00831a3d3e7 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260401024825-9d38bb4040a9 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260401024825-9d38bb4040a9 // indirect + google.golang.org/grpc v1.80.0 // indirect + google.golang.org/protobuf v1.36.11 // indirect + modernc.org/libc v1.72.0 // indirect + modernc.org/mathutil v1.7.1 // indirect + modernc.org/memory v1.11.0 // 
indirect +) diff --git a/ghl/go.sum b/ghl/go.sum new file mode 100644 index 00000000..833f2300 --- /dev/null +++ b/ghl/go.sum @@ -0,0 +1,183 @@ +cel.dev/expr v0.25.1 h1:1KrZg61W6TWSxuNZ37Xy49ps13NUovb66QLprthtwi4= +cel.dev/expr v0.25.1/go.mod h1:hrXvqGP6G6gyx8UAHSHJ5RGk//1Oj5nXQ2NI02Nrsg4= +cloud.google.com/go v0.123.0 h1:2NAUJwPR47q+E35uaJeYoNhuNEM9kM8SjgRgdeOJUSE= +cloud.google.com/go v0.123.0/go.mod h1:xBoMV08QcqUGuPW65Qfm1o9Y4zKZBpGS+7bImXLTAZU= +cloud.google.com/go/auth v0.20.0 h1:kXTssoVb4azsVDoUiF8KvxAqrsQcQtB53DcSgta74CA= +cloud.google.com/go/auth v0.20.0/go.mod h1:942/yi/itH1SsmpyrbnTMDgGfdy2BUqIKyd0cyYLc5Q= +cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs= +cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10= +cloud.google.com/go/iam v1.7.0 h1:JD3zh0C6LHl16aCn5Akff0+GELdp1+4hmh6ndoFLl8U= +cloud.google.com/go/iam v1.7.0/go.mod h1:tetWZW1PD/m6vcuY2Zj/aU0eCHNPuxedbnbRTyKXvdY= +cloud.google.com/go/logging v1.13.2 h1:qqlHCBvieJT9Cdq4QqYx1KPadCQ2noD4FK02eNqHAjA= +cloud.google.com/go/logging v1.13.2/go.mod h1:zaybliM3yun1J8mU2dVQ1/qDzjbOqEijZCn6hSBtKak= +cloud.google.com/go/longrunning v0.9.0 h1:0EzbDEGsAvOZNbqXopgniY0w0a1phvu5IdUFq8grmqY= +cloud.google.com/go/longrunning v0.9.0/go.mod h1:pkTz846W7bF4o2SzdWJ40Hu0Re+UoNT6Q5t+igIcb8E= +cloud.google.com/go/monitoring v1.24.3 h1:dde+gMNc0UhPZD1Azu6at2e79bfdztVDS5lvhOdsgaE= +cloud.google.com/go/monitoring v1.24.3/go.mod h1:nYP6W0tm3N9H/bOw8am7t62YTzZY+zUeQ+Bi6+2eonI= +cloud.google.com/go/storage v1.62.1 h1:Os0G3XbUbjZumkpDUf2Y0rLoXJTCF1kU2kWUujKYXD8= +cloud.google.com/go/storage v1.62.1/go.mod h1:cpYz/kRVZ+UQAF1uHeea10/9ewcRbxGoGNKsS9daSXA= +cloud.google.com/go/trace v1.11.7 h1:kDNDX8JkaAG3R2nq1lIdkb7FCSi1rCmsEtKVsty7p+U= 
+cloud.google.com/go/trace v1.11.7/go.mod h1:TNn9d5V3fQVf6s4SCveVMIBS2LJUqo73GACmq/Tky0s= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.31.0 h1:DHa2U07rk8syqvCge0QIGMCE1WxGj9njT44GH7zNJLQ= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.31.0/go.mod h1:P4WPRUkOhJC13W//jWpyfJNDAIpvRbAUIYLX/4jtlE0= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.55.0 h1:UnDZ/zFfG1JhH/DqxIZYU/1CUAlTUScoXD/LcM2Ykk8= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.55.0/go.mod h1:IA1C1U7jO/ENqm/vhi7V9YYpBsp+IMyqNrEN94N7tVc= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.55.0 h1:7t/qx5Ost0s0wbA/VDrByOooURhp+ikYwv20i9Y07TQ= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.55.0/go.mod h1:vB2GH9GAYYJTO3mEn8oYwzEdhlayZIdQz6zdzgUIRvA= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.55.0 h1:0s6TxfCu2KHkkZPnBfsQ2y5qia0jl3MMrmBhu3nCOYk= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.55.0/go.mod h1:Mf6O40IAyB9zR/1J8nGDDPirZQQPbYJni8Yisy7NTMc= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cncf/xds/go v0.0.0-20251210132809-ee656c7534f5 h1:6xNmx7iTtyBRev0+D/Tv1FZd4SCg8axKApyNyRsAt/w= +github.com/cncf/xds/go v0.0.0-20251210132809-ee656c7534f5/go.mod h1:KdCmV+x/BuvyMxRnYBlmVaq4OLiKW6iRQfvC62cvdkI= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod 
h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/envoyproxy/go-control-plane v0.14.0 h1:hbG2kr4RuFj222B6+7T83thSPqLjwBIfQawTkC++2HA= +github.com/envoyproxy/go-control-plane v0.14.0/go.mod h1:NcS5X47pLl/hfqxU70yPwL9ZMkUlwlKxtAohpi2wBEU= +github.com/envoyproxy/go-control-plane/envoy v1.36.0 h1:yg/JjO5E7ubRyKX3m07GF3reDNEnfOboJ0QySbH736g= +github.com/envoyproxy/go-control-plane/envoy v1.36.0/go.mod h1:ty89S1YCCVruQAm9OtKeEkQLTb+Lkz0k8v9W0Oxsv98= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= +github.com/envoyproxy/protoc-gen-validate v1.3.0 h1:TvGH1wof4H33rezVKWSpqKz5NXWg5VPuZ0uONDT6eb4= +github.com/envoyproxy/protoc-gen-validate v1.3.0/go.mod h1:HvYl7zwPa5mffgyeTUHA9zHIH36nmrm7oCbo4YKoSWA= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/go-chi/chi/v5 v5.2.5 h1:Eg4myHZBjyvJmAFjFvWgrqDTXFyOzjj7YIm3L3mu6Ug= +github.com/go-chi/chi/v5 v5.2.5/go.mod h1:X7Gx4mteadT3eDOMTsXzmI4/rwUpOwBHLpAfupzFJP0= +github.com/go-jose/go-jose/v4 v4.1.4 h1:moDMcTHmvE6Groj34emNPLs/qtYXRVcd6S7NHbHz3kA= +github.com/go-jose/go-jose/v4 v4.1.4/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= 
+github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= +github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= +github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs= +github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA= +github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= +github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.14 h1:yh8ncqsbUY4shRD5dA6RlzjJaT4hi3kII+zYw8wmLb8= +github.com/googleapis/enterprise-certificate-proxy v0.3.14/go.mod h1:vqVt9yG9480NtzREnTlmGSBmFrA+bzb0yl0TxoBQXOg= +github.com/googleapis/gax-go/v2 v2.21.0 h1:h45NjjzEO3faG9Lg/cFrBh2PgegVVgzqKzuZl/wMbiI= +github.com/googleapis/gax-go/v2 v2.21.0/go.mod h1:But/NJU6TnZsrLai/xBAQLLz+Hc7fHZJt/hsCz3Fih4= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= 
+github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w= +github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= +github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= +github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/spiffe/go-spiffe/v2 v2.6.0 h1:l+DolpxNWYgruGQVV0xsfeya3CsC7m8iBzDnMpsbLuo= +github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xIx7lEzqblHEs= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= +go.opentelemetry.io/contrib/detectors/gcp v1.39.0 h1:kWRNZMsfBHZ+uHjiH4y7Etn2FK26LAGkNFw7RHv1DhE= +go.opentelemetry.io/contrib/detectors/gcp v1.39.0/go.mod 
h1:t/OGqzHBa5v6RHZwrDBJ2OirWc+4q/w2fTbLZwAKjTk= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.67.0 h1:yI1/OhfEPy7J9eoa6Sj051C7n5dvpj0QX8g4sRchg04= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.67.0/go.mod h1:NoUCKYWK+3ecatC4HjkRktREheMeEtrXoQxrqYFeHSc= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 h1:OyrsyzuttWTSur2qN/Lm0m2a8yqyIjUVBZcxFPuXq2o= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0/go.mod h1:C2NGBr+kAB4bk3xtMXfZ94gqFDtg/GkI7e9zqGh5Beg= +go.opentelemetry.io/otel v1.43.0 h1:mYIM03dnh5zfN7HautFE4ieIig9amkNANT+xcVxAj9I= +go.opentelemetry.io/otel v1.43.0/go.mod h1:JuG+u74mvjvcm8vj8pI5XiHy1zDeoCS2LB1spIq7Ay0= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.43.0 h1:TC+BewnDpeiAmcscXbGMfxkO+mwYUwE/VySwvw88PfA= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.43.0/go.mod h1:J/ZyF4vfPwsSr9xJSPyQ4LqtcTPULFR64KwTikGLe+A= +go.opentelemetry.io/otel/metric v1.43.0 h1:d7638QeInOnuwOONPp4JAOGfbCEpYb+K6DVWvdxGzgM= +go.opentelemetry.io/otel/metric v1.43.0/go.mod h1:RDnPtIxvqlgO8GRW18W6Z/4P462ldprJtfxHxyKd2PY= +go.opentelemetry.io/otel/sdk v1.43.0 h1:pi5mE86i5rTeLXqoF/hhiBtUNcrAGHLKQdhg4h4V9Dg= +go.opentelemetry.io/otel/sdk v1.43.0/go.mod h1:P+IkVU3iWukmiit/Yf9AWvpyRDlUeBaRg6Y+C58QHzg= +go.opentelemetry.io/otel/sdk/metric v1.43.0 h1:S88dyqXjJkuBNLeMcVPRFXpRw2fuwdvfCGLEo89fDkw= +go.opentelemetry.io/otel/sdk/metric v1.43.0/go.mod h1:C/RJtwSEJ5hzTiUz5pXF1kILHStzb9zFlIEe85bhj6A= +go.opentelemetry.io/otel/trace v1.43.0 h1:BkNrHpup+4k4w+ZZ86CZoHHEkohws8AY+WTX09nk+3A= +go.opentelemetry.io/otel/trace v1.43.0/go.mod h1:/QJhyVBUUswCphDVxq+8mld+AvhXZLhe+8WVFxiFff0= +golang.org/x/crypto v0.49.0 h1:+Ng2ULVvLHnJ/ZFEq4KdcDd/cfjrrjjNSXNzxg0Y4U4= +golang.org/x/crypto v0.49.0/go.mod h1:ErX4dUh2UM+CFYiXZRTcMpEcN8b/1gxEuv3nODoYtCA= +golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8= +golang.org/x/mod v0.33.0/go.mod 
h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w= +golang.org/x/net v0.52.0 h1:He/TN1l0e4mmR3QqHMT2Xab3Aj3L9qjbhRm78/6jrW0= +golang.org/x/net v0.52.0/go.mod h1:R1MAz7uMZxVMualyPXb+VaqGSa3LIaUqk0eEt3w36Sw= +golang.org/x/oauth2 v0.36.0 h1:peZ/1z27fi9hUOFCAZaHyrpWG5lwe0RJEEEeH0ThlIs= +golang.org/x/oauth2 v0.36.0/go.mod h1:YDBUJMTkDnJS+A4BP4eZBjCqtokkg1hODuPjwiGPO7Q= +golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4= +golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo= +golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw= +golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8= +golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA= +golang.org/x/time v0.15.0 h1:bbrp8t3bGUeFOx08pvsMYRTCVSMk89u4tKbNOZbp88U= +golang.org/x/time v0.15.0/go.mod h1:Y4YMaQmXwGQZoFaVFk4YpCt4FLQMYKZe9oeV/f4MSno= +golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k= +golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0= +gonum.org/v1/gonum v0.17.0 h1:VbpOemQlsSMrYmn7T2OUvQ4dqxQXU+ouZFQsZOx50z4= +gonum.org/v1/gonum v0.17.0/go.mod h1:El3tOrEuMpv2UdMrbNlKEh9vd86bmQ6vqIcDwxEOc1E= +google.golang.org/api v0.276.0 h1:nVArUtfLEihtW+b0DdcqRGK1xoEm2+ltAihyztq7MKY= +google.golang.org/api v0.276.0/go.mod h1:Fnag/EWUPIcJXuIkP1pjoTgS5vdxlk3eeemL7Do6bvw= +google.golang.org/genproto v0.0.0-20260319201613-d00831a3d3e7 h1:XzmzkmB14QhVhgnawEVsOn6OFsnpyxNPRY9QV01dNB0= +google.golang.org/genproto v0.0.0-20260319201613-d00831a3d3e7/go.mod h1:L43LFes82YgSonw6iTXTxXUX1OlULt4AQtkik4ULL/I= +google.golang.org/genproto/googleapis/api v0.0.0-20260401024825-9d38bb4040a9 h1:VPWxll4HlMw1Vs/qXtN7BvhZqsS9cdAittCNvVENElA= +google.golang.org/genproto/googleapis/api v0.0.0-20260401024825-9d38bb4040a9/go.mod 
h1:7QBABkRtR8z+TEnmXTqIqwJLlzrZKVfAUm7tY3yGv0M= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260401024825-9d38bb4040a9 h1:m8qni9SQFH0tJc1X0vmnpw/0t+AImlSvp30sEupozUg= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260401024825-9d38bb4040a9/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8= +google.golang.org/grpc v1.80.0 h1:Xr6m2WmWZLETvUNvIUmeD5OAagMw3FiKmMlTdViWsHM= +google.golang.org/grpc v1.80.0/go.mod h1:ho/dLnxwi3EDJA4Zghp7k2Ec1+c2jqup0bFkw07bwF4= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +modernc.org/cc/v4 v4.27.3 h1:uNCgn37E5U09mTv1XgskEVUJ8ADKpmFMPxzGJ0TSo+U= +modernc.org/cc/v4 v4.27.3/go.mod h1:3YjcbCqhoTTHPycJDRl2WZKKFj0nwcOIPBfEZK0Hdk8= +modernc.org/ccgo/v4 v4.32.4 h1:L5OB8rpEX4ZsXEQwGozRfJyJSFHbbNVOoQ59DU9/KuU= +modernc.org/ccgo/v4 v4.32.4/go.mod h1:lY7f+fiTDHfcv6YlRgSkxYfhs+UvOEEzj49jAn2TOx0= +modernc.org/fileutil v1.4.0 h1:j6ZzNTftVS054gi281TyLjHPp6CPHr2KCxEXjEbD6SM= +modernc.org/fileutil v1.4.0/go.mod h1:EqdKFDxiByqxLk8ozOxObDSfcVOv/54xDs/DUHdvCUU= +modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI= +modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito= +modernc.org/gc/v3 v3.1.2 h1:ZtDCnhonXSZexk/AYsegNRV1lJGgaNZJuKjJSWKyEqo= +modernc.org/gc/v3 v3.1.2/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY= +modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks= 
+modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI= +modernc.org/libc v1.72.0 h1:IEu559v9a0XWjw0DPoVKtXpO2qt5NVLAnFaBbjq+n8c= +modernc.org/libc v1.72.0/go.mod h1:tTU8DL8A+XLVkEY3x5E/tO7s2Q/q42EtnNWda/L5QhQ= +modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= +modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg= +modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI= +modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw= +modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8= +modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns= +modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w= +modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE= +modernc.org/sqlite v1.49.0 h1:isQFJ0Vs7/t8PkjU+EKHskE+WRCKUpUIO4DdTniFTV8= +modernc.org/sqlite v1.49.0/go.mod h1:m0w8xhwYUVY3H6pSDwc3gkJ/irZT/0YEXwBlhaxQEew= +modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0= +modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A= +modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y= +modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= diff --git a/ghl/internal/auth/github.go b/ghl/internal/auth/github.go new file mode 100644 index 00000000..6f696d3d --- /dev/null +++ b/ghl/internal/auth/github.go @@ -0,0 +1,208 @@ +package auth + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "net/http" + "strings" + "sync" + "time" +) + +const githubAPIVersion = "2022-11-28" + +// GitHubConfig configures bearer-token validation against GitHub. +type GitHubConfig struct { + BaseURL string + AllowedOrgs []string + HTTPClient *http.Client + CacheTTL time.Duration +} + +// GitHubAuthenticator validates incoming bearer tokens against GitHub APIs. 
+type GitHubAuthenticator struct { + baseURL string + allowedOrgs []string + client *http.Client + cacheTTL time.Duration + + mu sync.Mutex + cache map[string]cacheEntry +} + +type cacheEntry struct { + expiresAt time.Time + err error +} + +type githubUser struct { + Login string `json:"login"` +} + +type githubMembership struct { + State string `json:"state"` +} + +// NewGitHubAuthenticator constructs a GitHub-backed token authenticator. +func NewGitHubAuthenticator(cfg GitHubConfig) *GitHubAuthenticator { + baseURL := strings.TrimSpace(cfg.BaseURL) + if baseURL == "" { + baseURL = "https://api.github.com" + } + client := cfg.HTTPClient + if client == nil { + client = &http.Client{Timeout: 10 * time.Second} + } + cacheTTL := cfg.CacheTTL + if cacheTTL <= 0 { + cacheTTL = 5 * time.Minute + } + return &GitHubAuthenticator{ + baseURL: strings.TrimRight(baseURL, "/"), + allowedOrgs: append([]string(nil), cfg.AllowedOrgs...), + client: client, + cacheTTL: cacheTTL, + cache: make(map[string]cacheEntry), + } +} + +// Authenticate validates the bearer token against GitHub and optional org membership. 
+func (a *GitHubAuthenticator) Authenticate(ctx context.Context, bearerToken string) error { + token := strings.TrimSpace(bearerToken) + if token == "" { + return errors.New("missing github token") + } + + cacheKey := hashToken(token) + if err, ok := a.cached(cacheKey); ok { + return err + } + + err := a.authenticateUncached(ctx, token) + if err == nil { + a.store(cacheKey, nil) + } + return err +} + +func (a *GitHubAuthenticator) authenticateUncached(ctx context.Context, token string) error { + user, err := a.fetchUser(ctx, token) + if err != nil { + return err + } + if len(a.allowedOrgs) == 0 { + return nil + } + for _, org := range a.allowedOrgs { + ok, err := a.isActiveOrgMember(ctx, token, org, user.Login) + if err == nil && ok { + return nil + } + } + return fmt.Errorf("github user %q is not an active member of allowed orgs", user.Login) +} + +func (a *GitHubAuthenticator) fetchUser(ctx context.Context, token string) (*githubUser, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, a.baseURL+"/user", nil) + if err != nil { + return nil, err + } + addGitHubHeaders(req, token) + + resp, err := a.client.Do(req) + if err != nil { + return nil, fmt.Errorf("github /user request failed: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("github /user returned %d", resp.StatusCode) + } + + var user githubUser + if err := json.NewDecoder(resp.Body).Decode(&user); err != nil { + return nil, fmt.Errorf("decode github /user: %w", err) + } + if user.Login == "" { + return nil, errors.New("github /user missing login") + } + return &user, nil +} + +func (a *GitHubAuthenticator) isActiveOrgMember(ctx context.Context, token, org, _ string) (bool, error) { + org = strings.TrimSpace(org) + if org == "" { + return false, nil + } + + // Use /user/orgs — lists all orgs the authenticated user belongs to. + // Works with any token scope. Check if the target org is in the list. 
+ reqURL := a.baseURL + "/user/orgs?per_page=100" + req, err := http.NewRequestWithContext(ctx, http.MethodGet, reqURL, nil) + if err != nil { + return false, err + } + addGitHubHeaders(req, token) + + resp, err := a.client.Do(req) + if err != nil { + return false, fmt.Errorf("github /user/orgs request failed: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return false, fmt.Errorf("github /user/orgs returned %d", resp.StatusCode) + } + + var orgs []struct { + Login string `json:"login"` + } + if err := json.NewDecoder(resp.Body).Decode(&orgs); err != nil { + return false, fmt.Errorf("decode github /user/orgs: %w", err) + } + for _, o := range orgs { + if strings.EqualFold(o.Login, org) { + return true, nil + } + } + return false, nil +} + +func addGitHubHeaders(req *http.Request, token string) { + req.Header.Set("Authorization", "Bearer "+token) + req.Header.Set("Accept", "application/vnd.github+json") + req.Header.Set("X-GitHub-Api-Version", githubAPIVersion) + req.Header.Set("User-Agent", "codebase-memory-mcp-ghl") +} + +func hashToken(token string) string { + sum := sha256.Sum256([]byte(token)) + return hex.EncodeToString(sum[:]) +} + +func (a *GitHubAuthenticator) cached(key string) (error, bool) { + a.mu.Lock() + defer a.mu.Unlock() + entry, ok := a.cache[key] + if !ok { + return nil, false + } + if time.Now().After(entry.expiresAt) { + delete(a.cache, key) + return nil, false + } + return entry.err, true +} + +func (a *GitHubAuthenticator) store(key string, err error) { + a.mu.Lock() + defer a.mu.Unlock() + a.cache[key] = cacheEntry{ + expiresAt: time.Now().Add(a.cacheTTL), + err: err, + } +} diff --git a/ghl/internal/auth/github_test.go b/ghl/internal/auth/github_test.go new file mode 100644 index 00000000..856e9142 --- /dev/null +++ b/ghl/internal/auth/github_test.go @@ -0,0 +1,178 @@ +package auth + +import ( + "context" + "net/http" + "net/http/httptest" + "sync/atomic" + "testing" + "time" +) + +func 
TestGitHubAuthenticatorAcceptsValidUserToken(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/user":
			w.Header().Set("Content-Type", "application/json")
			_, _ = w.Write([]byte(`{"login":"octocat"}`))
		default:
			http.NotFound(w, r)
		}
	}))
	defer server.Close()

	auth := NewGitHubAuthenticator(GitHubConfig{
		BaseURL:  server.URL,
		CacheTTL: time.Minute,
	})

	if err := auth.Authenticate(context.Background(), "ghp-valid"); err != nil {
		t.Fatalf("Authenticate: unexpected error: %v", err)
	}
}

// The implementation checks membership by listing orgs via GET /user/orgs,
// so the fakes below stub that endpoint (the earlier mocks targeted
// /user/memberships/orgs/{org}, which the code no longer calls).
func TestGitHubAuthenticatorRejectsUserOutsideAllowedOrg(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/user":
			w.Header().Set("Content-Type", "application/json")
			_, _ = w.Write([]byte(`{"login":"octocat"}`))
		case "/user/orgs":
			// User belongs to no orgs at all.
			w.Header().Set("Content-Type", "application/json")
			_, _ = w.Write([]byte(`[]`))
		default:
			http.NotFound(w, r)
		}
	}))
	defer server.Close()

	auth := NewGitHubAuthenticator(GitHubConfig{
		BaseURL:     server.URL,
		AllowedOrgs: []string{"GoHighLevel"},
		CacheTTL:    time.Minute,
	})

	if err := auth.Authenticate(context.Background(), "ghp-valid"); err == nil {
		t.Fatal("Authenticate: expected org membership error, got nil")
	}
}

func TestGitHubAuthenticatorAcceptsActiveOrgMember(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/user":
			w.Header().Set("Content-Type", "application/json")
			_, _ = w.Write([]byte(`{"login":"octocat"}`))
		case "/user/orgs":
			w.Header().Set("Content-Type", "application/json")
			_, _ = w.Write([]byte(`[{"login":"GoHighLevel"}]`))
		default:
			http.NotFound(w, r)
		}
	}))
	defer server.Close()

	auth := NewGitHubAuthenticator(GitHubConfig{
		BaseURL:     server.URL,
		AllowedOrgs: []string{"GoHighLevel"},
		CacheTTL:    time.Minute,
	})

	if err := auth.Authenticate(context.Background(), "ghp-valid"); err != nil {
		t.Fatalf("Authenticate: unexpected error: %v", err)
	}
}

func TestGitHubAuthenticatorCachesSuccessfulValidation(t *testing.T) {
	var userCalls atomic.Int32
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/user":
			userCalls.Add(1)
			w.Header().Set("Content-Type", "application/json")
			_, _ = w.Write([]byte(`{"login":"octocat"}`))
		default:
			http.NotFound(w, r)
		}
	}))
	defer server.Close()

	auth := NewGitHubAuthenticator(GitHubConfig{
		BaseURL:  server.URL,
		CacheTTL: time.Minute,
	})

	if err := auth.Authenticate(context.Background(), "ghp-valid"); err != nil {
		t.Fatalf("Authenticate first: unexpected error: %v", err)
	}
	if err := auth.Authenticate(context.Background(), "ghp-valid"); err != nil {
		t.Fatalf("Authenticate second: unexpected error: %v", err)
	}
	if got := userCalls.Load(); got != 1 {
		t.Fatalf("/user calls: want 1, got %d", got)
	}
}

func TestGitHubAuthenticatorDoesNotCacheTransientFailures(t *testing.T) {
	var userCalls atomic.Int32
	var failFirst atomic.Bool
	failFirst.Store(true)

	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/user":
			userCalls.Add(1)
			if failFirst.CompareAndSwap(true, false) {
				http.Error(w, "temporary failure", http.StatusInternalServerError)
				return
			}
			w.Header().Set("Content-Type", "application/json")
			_, _ = w.Write([]byte(`{"login":"octocat"}`))
		default:
			http.NotFound(w, r)
		}
	}))
	defer server.Close()

	auth := NewGitHubAuthenticator(GitHubConfig{
		BaseURL:  server.URL,
		CacheTTL: time.Minute,
	})

	if err := auth.Authenticate(context.Background(), "ghp-valid"); err == nil {
		t.Fatal("Authenticate first: expected transient failure, got nil")
	}
	if err := auth.Authenticate(context.Background(),
"ghp-valid"); err != nil { + t.Fatalf("Authenticate second: unexpected error: %v", err) + } + if got := userCalls.Load(); got != 2 { + t.Fatalf("/user calls: want 2 after transient failure retry, got %d", got) + } +} + +func TestGitHubAuthenticatorAcceptsUserInAnyAllowedOrg(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.URL.Path { + case "/user": + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"login":"octocat"}`)) + case "/user/memberships/orgs/OrgOne": + http.Error(w, "not found", http.StatusNotFound) + case "/user/memberships/orgs/OrgTwo": + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"state":"active"}`)) + default: + http.NotFound(w, r) + } + })) + defer server.Close() + + auth := NewGitHubAuthenticator(GitHubConfig{ + BaseURL: server.URL, + AllowedOrgs: []string{"OrgOne", "OrgTwo"}, + CacheTTL: time.Minute, + }) + + if err := auth.Authenticate(context.Background(), "ghp-valid"); err != nil { + t.Fatalf("Authenticate: unexpected error: %v", err) + } +} diff --git a/ghl/internal/bridge/bridge.go b/ghl/internal/bridge/bridge.go new file mode 100644 index 00000000..446062bb --- /dev/null +++ b/ghl/internal/bridge/bridge.go @@ -0,0 +1,177 @@ +// Package bridge exposes the codebase-memory-mcp stdio binary as an HTTP endpoint. +package bridge + +import ( + "context" + "encoding/json" + "errors" + "io" + "net/http" + "strings" +) + +// ErrBackendUnavailable is returned when the underlying MCP binary is not ready. +var ErrBackendUnavailable = errors.New("bridge: backend unavailable") + +// ErrBackendBusy is returned when the backend has no capacity for another request. +var ErrBackendBusy = errors.New("bridge: backend busy") + +// ErrMethodNotFound is returned when the bridge backend does not implement an MCP method. 
+var ErrMethodNotFound = errors.New("bridge: method not found") + +// Backend is the interface to the underlying MCP binary. +type Backend interface { + // Call forwards a JSON-RPC method + params and returns the raw result or error. + Call(ctx context.Context, method string, params json.RawMessage) (json.RawMessage, error) +} + +// Config configures the HTTP bridge. +type Config struct { + // BearerToken, if non-empty, requires all /mcp requests to carry + // "Authorization: Bearer ". + BearerToken string + // Authenticator, if non-nil, validates bearer tokens dynamically. + // When set, it takes precedence over BearerToken. + Authenticator Authenticator +} + +// Authenticator validates bearer tokens for HTTP requests. +type Authenticator interface { + Authenticate(ctx context.Context, bearerToken string) error +} + +// Handler is an http.Handler that bridges HTTP JSON-RPC requests to the MCP backend. +type Handler struct { + backend Backend + cfg Config +} + +// NewHandler creates a new bridge Handler. +func NewHandler(backend Backend, cfg Config) *Handler { + return &Handler{backend: backend, cfg: cfg} +} + +// jsonrpcRequest is the inbound envelope. 
+type jsonrpcRequest struct { + JSONRPC string `json:"jsonrpc"` + ID interface{} `json:"id"` + Method string `json:"method"` + Params json.RawMessage `json:"params,omitempty"` +} + +// ServeHTTP routes requests: +// +// GET /health — liveness check, no auth required +// POST /mcp — Streamable HTTP JSON-RPC, auth required if BearerToken is set +func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/health" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"status":"ok"}`)) + return + } + + if r.Method == http.MethodGet { + w.Header().Set("Allow", http.MethodPost) + http.Error(w, "method not allowed", http.StatusMethodNotAllowed) + return + } + + if r.Method != http.MethodPost { + w.Header().Set("Allow", http.MethodPost) + http.Error(w, "method not allowed", http.StatusMethodNotAllowed) + return + } + + // Auth check + if h.cfg.Authenticator != nil { + auth := r.Header.Get("Authorization") + if !strings.HasPrefix(auth, "Bearer ") { + http.Error(w, "unauthorized", http.StatusUnauthorized) + return + } + if err := h.cfg.Authenticator.Authenticate(r.Context(), strings.TrimPrefix(auth, "Bearer ")); err != nil { + http.Error(w, "unauthorized", http.StatusUnauthorized) + return + } + } else if h.cfg.BearerToken != "" { + auth := r.Header.Get("Authorization") + if !strings.HasPrefix(auth, "Bearer ") || strings.TrimPrefix(auth, "Bearer ") != h.cfg.BearerToken { + http.Error(w, "unauthorized", http.StatusUnauthorized) + return + } + } + + body, err := io.ReadAll(io.LimitReader(r.Body, 4<<20)) // 4 MB cap + if err != nil { + http.Error(w, "failed to read body", http.StatusBadRequest) + return + } + + var req jsonrpcRequest + if err := json.Unmarshal(body, &req); err != nil { + http.Error(w, "invalid JSON", http.StatusBadRequest) + return + } + + if req.JSONRPC != "" && req.JSONRPC != "2.0" { + w.Header().Set("Content-Type", "application/json") + writeError(w, req.ID, -32600, 
"invalid request: jsonrpc must be 2.0") + return + } + + // MCP notifications do not expect a JSON-RPC response body. + if req.ID == nil && strings.HasPrefix(req.Method, "notifications/") { + w.WriteHeader(http.StatusAccepted) + return + } + + result, backendErr := h.backend.Call(r.Context(), req.Method, req.Params) + if backendErr != nil { + switch { + case errors.Is(backendErr, context.Canceled): + return + case errors.Is(backendErr, context.DeadlineExceeded): + http.Error(w, "backend timed out", http.StatusGatewayTimeout) + return + case errors.Is(backendErr, ErrBackendBusy): + w.Header().Set("Retry-After", "1") + http.Error(w, "backend overloaded, retry later", http.StatusServiceUnavailable) + return + case errors.Is(backendErr, ErrMethodNotFound): + w.Header().Set("Content-Type", "application/json") + writeError(w, req.ID, -32601, backendErr.Error()) + default: + w.Header().Set("Content-Type", "application/json") + writeError(w, req.ID, -32603, "backend error: "+backendErr.Error()) + } + return + } + + w.Header().Set("Content-Type", "application/json") + + resp := struct { + JSONRPC string `json:"jsonrpc"` + ID interface{} `json:"id"` + Result json.RawMessage `json:"result"` + }{ + JSONRPC: "2.0", + ID: req.ID, + Result: result, + } + w.WriteHeader(http.StatusOK) + _ = json.NewEncoder(w).Encode(resp) +} + +func writeError(w http.ResponseWriter, id interface{}, code int, message string) { + resp := map[string]interface{}{ + "jsonrpc": "2.0", + "id": id, + "error": map[string]interface{}{ + "code": code, + "message": message, + }, + } + w.WriteHeader(http.StatusOK) + _ = json.NewEncoder(w).Encode(resp) +} diff --git a/ghl/internal/bridge/bridge_test.go b/ghl/internal/bridge/bridge_test.go new file mode 100644 index 00000000..867fec17 --- /dev/null +++ b/ghl/internal/bridge/bridge_test.go @@ -0,0 +1,317 @@ +package bridge_test + +import ( + "bytes" + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + 
"github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/bridge" +) + +// ── Fake MCP backend ────────────────────────────────────────── + +type fakeBackend struct { + response json.RawMessage + err error + method string + params json.RawMessage + calls int + ctx context.Context +} + +func (f *fakeBackend) Call(ctx context.Context, method string, params json.RawMessage) (json.RawMessage, error) { + f.ctx = ctx + f.method = method + f.params = append(json.RawMessage(nil), params...) + f.calls++ + return f.response, f.err +} + +// ── Helpers ──────────────────────────────────────────────────── + +func mcpRequest(t *testing.T, id interface{}, method string, params interface{}) []byte { + t.Helper() + p, _ := json.Marshal(params) + req := map[string]interface{}{ + "jsonrpc": "2.0", + "id": id, + "method": method, + "params": json.RawMessage(p), + } + b, _ := json.Marshal(req) + return b +} + +type fakeAuthenticator struct { + token string + calls int +} + +func (f *fakeAuthenticator) Authenticate(_ context.Context, bearerToken string) error { + f.calls++ + if bearerToken != f.token { + return bridge.ErrBackendUnavailable + } + return nil +} + +// ── Tests ────────────────────────────────────────────────────── + +func TestBridge_ForwardsToolCall(t *testing.T) { + expected := json.RawMessage(`{"content":[{"type":"text","text":"ok"}],"isError":false}`) + backend := &fakeBackend{response: expected} + h := bridge.NewHandler(backend, bridge.Config{}) + + body := mcpRequest(t, 1, "tools/call", map[string]interface{}{ + "name": "list_projects", + "arguments": map[string]interface{}{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Errorf("status: want 200, got %d\nbody: %s", rr.Code, rr.Body.String()) + } + + var resp map[string]interface{} + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != 
nil { + t.Fatalf("parse response: %v\nbody: %s", err, rr.Body.String()) + } + if resp["jsonrpc"] != "2.0" { + t.Errorf("jsonrpc: want 2.0, got %v", resp["jsonrpc"]) + } + if resp["result"] == nil { + t.Error("result: want non-nil") + } + if backend.method != "tools/call" { + t.Errorf("method: want tools/call, got %q", backend.method) + } + if backend.ctx == nil { + t.Error("backend ctx: expected request context to be forwarded") + } +} + +func TestBridge_ReturnsErrorOnBackendFailure(t *testing.T) { + backend := &fakeBackend{err: bridge.ErrBackendUnavailable} + h := bridge.NewHandler(backend, bridge.Config{}) + + body := mcpRequest(t, 2, "tools/call", map[string]interface{}{"name": "list_projects"}) + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + // HTTP level: still 200 (MCP errors are in the JSON body) + if rr.Code != http.StatusOK { + t.Errorf("status: want 200, got %d", rr.Code) + } + + var resp map[string]interface{} + json.Unmarshal(rr.Body.Bytes(), &resp) + if resp["error"] == nil { + t.Error("expected JSON-RPC error field for backend failure") + } +} + +func TestBridge_ReturnsServiceUnavailableWhenBackendBusy(t *testing.T) { + backend := &fakeBackend{err: bridge.ErrBackendBusy} + h := bridge.NewHandler(backend, bridge.Config{}) + + body := mcpRequest(t, 2, "tools/call", map[string]interface{}{"name": "list_projects"}) + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + if rr.Code != http.StatusServiceUnavailable { + t.Fatalf("status: want 503, got %d", rr.Code) + } + if got := rr.Header().Get("Retry-After"); got != "1" { + t.Fatalf("Retry-After: want 1, got %q", got) + } +} + +func TestBridge_RequiresAuthToken(t *testing.T) { + backend := &fakeBackend{response: json.RawMessage(`{}`)} + h 
:= bridge.NewHandler(backend, bridge.Config{ + BearerToken: "secret-token", + }) + + body := mcpRequest(t, 3, "tools/call", nil) + + // Request without token + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + if rr.Code != http.StatusUnauthorized { + t.Errorf("status: want 401 without token, got %d", rr.Code) + } + + // Request with correct token + req2 := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + req2.Header.Set("Content-Type", "application/json") + req2.Header.Set("Authorization", "Bearer secret-token") + rr2 := httptest.NewRecorder() + h.ServeHTTP(rr2, req2) + + if rr2.Code != http.StatusOK { + t.Errorf("status: want 200 with correct token, got %d", rr2.Code) + } +} + +func TestBridge_UsesAuthenticatorWhenConfigured(t *testing.T) { + backend := &fakeBackend{response: json.RawMessage(`{}`)} + auth := &fakeAuthenticator{token: "ghp-valid"} + h := bridge.NewHandler(backend, bridge.Config{ + BearerToken: "legacy-token", + Authenticator: auth, + }) + + body := mcpRequest(t, 4, "tools/call", nil) + + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", "Bearer ghp-valid") + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("status: want 200 with valid authenticator token, got %d", rr.Code) + } + if auth.calls != 1 { + t.Fatalf("auth calls: want 1, got %d", auth.calls) + } +} + +func TestBridge_RejectsInvalidAuthenticatorToken(t *testing.T) { + backend := &fakeBackend{response: json.RawMessage(`{}`)} + auth := &fakeAuthenticator{token: "ghp-valid"} + h := bridge.NewHandler(backend, bridge.Config{ + Authenticator: auth, + }) + + body := mcpRequest(t, 5, "tools/call", nil) + + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + 
req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", "Bearer ghp-invalid") + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + if rr.Code != http.StatusUnauthorized { + t.Fatalf("status: want 401 with invalid authenticator token, got %d", rr.Code) + } + if auth.calls != 1 { + t.Fatalf("auth calls: want 1, got %d", auth.calls) + } +} + +func TestBridge_InvalidJSON_BadRequest(t *testing.T) { + backend := &fakeBackend{response: json.RawMessage(`{}`)} + h := bridge.NewHandler(backend, bridge.Config{}) + + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader([]byte("not json {"))) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + if rr.Code != http.StatusBadRequest { + t.Errorf("status: want 400 for invalid JSON, got %d", rr.Code) + } +} + +func TestBridge_MethodNotAllowed(t *testing.T) { + backend := &fakeBackend{response: json.RawMessage(`{}`)} + h := bridge.NewHandler(backend, bridge.Config{}) + + req := httptest.NewRequest(http.MethodGet, "/mcp", nil) + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + if rr.Code != http.StatusMethodNotAllowed { + t.Errorf("status: want 405 for GET, got %d", rr.Code) + } + if got := rr.Header().Get("Allow"); got != http.MethodPost { + t.Errorf("Allow: want POST, got %q", got) + } +} + +func TestBridge_HealthEndpoint(t *testing.T) { + backend := &fakeBackend{response: json.RawMessage(`{}`)} + h := bridge.NewHandler(backend, bridge.Config{}) + + req := httptest.NewRequest(http.MethodGet, "/health", nil) + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Errorf("status: want 200 for /health, got %d", rr.Code) + } +} + +func TestBridge_PreservesRequestID(t *testing.T) { + backend := &fakeBackend{response: json.RawMessage(`{"content":[],"isError":false}`)} + h := bridge.NewHandler(backend, bridge.Config{}) + + body := mcpRequest(t, "req-42", "tools/call", 
map[string]interface{}{"name": "list_projects"}) + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + var resp map[string]interface{} + json.Unmarshal(rr.Body.Bytes(), &resp) + if resp["id"] != "req-42" { + t.Errorf("id: want req-42, got %v", resp["id"]) + } +} + +func TestBridge_NotificationAcceptedWithoutResponse(t *testing.T) { + backend := &fakeBackend{response: json.RawMessage(`{}`)} + h := bridge.NewHandler(backend, bridge.Config{}) + + body := []byte(`{"jsonrpc":"2.0","method":"notifications/initialized"}`) + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + if rr.Code != http.StatusAccepted { + t.Errorf("status: want 202 for notification, got %d", rr.Code) + } + if rr.Body.Len() != 0 { + t.Errorf("body: want empty notification response, got %q", rr.Body.String()) + } + if backend.calls != 0 { + t.Errorf("backend calls: want 0, got %d", backend.calls) + } +} + +func TestBridge_ReturnsMethodNotFound(t *testing.T) { + backend := &fakeBackend{err: bridge.ErrMethodNotFound} + h := bridge.NewHandler(backend, bridge.Config{}) + + body := mcpRequest(t, 9, "unknown/method", nil) + req := httptest.NewRequest(http.MethodPost, "/mcp", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + h.ServeHTTP(rr, req) + + var resp map[string]interface{} + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("parse response: %v", err) + } + + errObj, _ := resp["error"].(map[string]interface{}) + if code := int(errObj["code"].(float64)); code != -32601 { + t.Errorf("error code: want -32601, got %d", code) + } +} diff --git a/ghl/internal/bridge/searchcache.go b/ghl/internal/bridge/searchcache.go new file mode 100644 index 00000000..f68f33e4 
// cacheEntry holds one cached tool result together with its expiry deadline
// and insertion time (the latter drives oldest-first capacity eviction).
type cacheEntry struct {
	result     json.RawMessage
	expiresAt  time.Time
	insertedAt time.Time
}

// SearchCache is a concurrent-safe, TTL-bounded in-memory cache for tool
// results. It is per-process (not shared across Cloud Run instances).
type SearchCache struct {
	mu      sync.RWMutex
	entries map[string]*cacheEntry
	maxSize int
	ttl     time.Duration
}

// NewSearchCache creates a SearchCache and starts a background goroutine that
// evicts expired entries every ttl/2 (minimum 5 s).
//
// NOTE(review): the sweeper goroutine has no stop signal, so every cache
// lives for the remainder of the process — fine for the current
// one-cache-per-server usage, but add a Close method before creating caches
// with shorter lifetimes.
func NewSearchCache(maxSize int, ttl time.Duration) *SearchCache {
	c := &SearchCache{
		entries: make(map[string]*cacheEntry, maxSize),
		maxSize: maxSize,
		ttl:     ttl,
	}

	sweepInterval := ttl / 2
	if sweepInterval < 5*time.Second {
		sweepInterval = 5 * time.Second
	}
	go c.sweepLoop(sweepInterval)

	return c
}

// Key derives a cache key from the tool name and its arguments map.
// The key is a hex-encoded SHA-256 over toolName, a NUL separator, and the
// JSON encoding of params. It returns "" when params cannot be marshalled;
// callers must treat "" as uncacheable (Get/Set ignore it).
func (c *SearchCache) Key(toolName string, params map[string]interface{}) string {
	b, err := json.Marshal(params)
	if err != nil {
		// Fallback: uncacheable; return empty string (callers must handle "").
		return ""
	}
	h := sha256.New()
	h.Write([]byte(toolName))
	h.Write([]byte{0x00})
	h.Write(b)
	return fmt.Sprintf("%x", h.Sum(nil))
}

// Get returns the cached result for key if it exists and has not expired.
// The second return value is false on a cache miss. Expired entries are
// deleted eagerly rather than lingering until the next sweep, which bounds
// memory between sweeper ticks.
func (c *SearchCache) Get(key string) (json.RawMessage, bool) {
	if key == "" {
		return nil, false
	}
	c.mu.RLock()
	entry, ok := c.entries[key]
	c.mu.RUnlock()
	if !ok {
		return nil, false
	}
	if time.Now().After(entry.expiresAt) {
		c.mu.Lock()
		// Re-check under the write lock: another goroutine may have stored a
		// fresh entry for this key since we released the read lock.
		if cur, stillThere := c.entries[key]; stillThere && cur == entry {
			delete(c.entries, key)
		}
		c.mu.Unlock()
		return nil, false
	}
	return entry.result, true
}

// Set stores result under key with the configured TTL.
// If the cache is at maxSize, the oldest entry is evicted first.
// Empty keys (uncacheable) and empty results are ignored.
func (c *SearchCache) Set(key string, result json.RawMessage) {
	if key == "" || len(result) == 0 {
		return
	}
	now := time.Now()
	c.mu.Lock()
	defer c.mu.Unlock()

	// Evict oldest entry when at capacity (only when adding a new key).
	if _, exists := c.entries[key]; !exists && len(c.entries) >= c.maxSize {
		c.evictOldestLocked()
	}

	c.entries[key] = &cacheEntry{
		result:     result,
		expiresAt:  now.Add(c.ttl),
		insertedAt: now,
	}
}

// evictOldestLocked removes the entry with the earliest insertedAt.
// Must be called with c.mu held for writing.
func (c *SearchCache) evictOldestLocked() {
	var oldestKey string
	var oldestTime time.Time
	for k, e := range c.entries {
		if oldestKey == "" || e.insertedAt.Before(oldestTime) {
			oldestKey = k
			oldestTime = e.insertedAt
		}
	}
	if oldestKey != "" {
		delete(c.entries, oldestKey)
	}
}

// sweepLoop periodically removes expired entries to bound memory usage.
// It runs until the process exits (no stop channel — see NewSearchCache).
func (c *SearchCache) sweepLoop(interval time.Duration) {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()
	for range ticker.C {
		now := time.Now()
		c.mu.Lock()
		evicted := 0
		for k, e := range c.entries {
			if now.After(e.expiresAt) {
				delete(c.entries, k)
				evicted++
			}
		}
		c.mu.Unlock()
		if evicted > 0 {
			slog.Debug("search cache: swept expired entries", "evicted", evicted)
		}
	}
}
*gcsBackend) Hydrate(runtimeDir string) (int, error) { + ctx, cancel := context.WithTimeout(context.Background(), gcsOperationTimeout) + defer cancel() + + files, err := b.listDBObjects(ctx) + if err != nil { + return 0, err + } + if len(files) == 0 { + return 0, nil + } + + // Parallel download with up to 32 concurrent workers. + g, gctx := errgroup.WithContext(ctx) + g.SetLimit(32) + var copied atomic.Int64 + + for _, attrs := range files { + attrs := attrs + g.Go(func() error { + name := path.Base(attrs.Name) + dst := filepath.Join(runtimeDir, name) + + // Skip if already exists and same size. + if info, statErr := os.Stat(dst); statErr == nil && info.Size() == attrs.Size { + copied.Add(1) + return nil + } + + reader, rErr := b.client.Bucket(b.bucket).Object(attrs.Name).NewReader(gctx) + if rErr != nil { + return fmt.Errorf("cachepersist: open %s: %w", attrs.Name, rErr) + } + wErr := copyReaderAtomic(reader, dst, 0o640) + _ = reader.Close() + if wErr != nil { + return fmt.Errorf("cachepersist: hydrate %s: %w", name, wErr) + } + copied.Add(1) + return nil + }) + } + + if err := g.Wait(); err != nil { + return int(copied.Load()), err + } + return int(copied.Load()), nil +} + +func (b *gcsBackend) PersistProject(runtimeDir, project string) (int, error) { + project = strings.TrimSpace(project) + if project == "" { + return 0, fmt.Errorf("cachepersist: project is required") + } + + pattern := filepath.Join(runtimeDir, project+".db*") + matches, err := filepath.Glob(pattern) + if err != nil { + return 0, fmt.Errorf("cachepersist: glob project artifacts: %w", err) + } + sort.Strings(matches) + + copied := 0 + for _, src := range matches { + info, err := os.Stat(src) + if err != nil { + if os.IsNotExist(err) { + continue + } + return copied, fmt.Errorf("cachepersist: stat %s: %w", src, err) + } + if info.IsDir() || !isDBArtifact(info.Name()) { + continue + } + + ctx, cancel := context.WithTimeout(context.Background(), gcsOperationTimeout) + if err := b.uploadFile(ctx, 
src, info.Name()); err != nil { + cancel() + return copied, fmt.Errorf("cachepersist: persist %s: %w", info.Name(), err) + } + cancel() + copied++ + } + return copied, nil +} + +func (b *gcsBackend) CountArtifacts() (int, error) { + ctx, cancel := context.WithTimeout(context.Background(), gcsOperationTimeout) + defer cancel() + + files, err := b.listDBObjects(ctx) + if err != nil { + return 0, err + } + return len(files), nil +} + +func (b *gcsBackend) Close() error { + return b.client.Close() +} + +func (b *gcsBackend) uploadFile(ctx context.Context, srcPath, name string) error { + input, err := os.Open(srcPath) + if err != nil { + return err + } + defer input.Close() + + writer := b.client.Bucket(b.bucket).Object(b.objectName(name)).NewWriter(ctx) + writer.ContentType = "application/octet-stream" + if _, err := io.Copy(writer, input); err != nil { + _ = writer.Close() + return err + } + if err := writer.Close(); err != nil { + return err + } + return nil +} + +func (b *gcsBackend) listDBObjects(ctx context.Context) ([]*storage.ObjectAttrs, error) { + query := &storage.Query{Prefix: b.listPrefix()} + iter := b.client.Bucket(b.bucket).Objects(ctx, query) + + files := make([]*storage.ObjectAttrs, 0) + for { + attrs, err := iter.Next() + if err == iterator.Done { + break + } + if err != nil { + return nil, fmt.Errorf("cachepersist: list gcs objects: %w", err) + } + if attrs == nil || strings.HasSuffix(attrs.Name, "/") { + continue + } + if !isDBArtifact(path.Base(attrs.Name)) { + continue + } + files = append(files, attrs) + } + + sort.Slice(files, func(i, j int) bool { + return files[i].Name < files[j].Name + }) + return files, nil +} + +func (b *gcsBackend) listPrefix() string { + if b.prefix == "" { + return "" + } + return b.prefix + "/" +} + +func (b *gcsBackend) objectName(name string) string { + if b.prefix == "" { + return name + } + return b.prefix + "/" + name +} + +func normalizeGCSPrefix(prefix string) string { + return 
// backend abstracts the storage medium (local filesystem or GCS) used to
// persist and restore DB artifacts.
type backend interface {
	Hydrate(runtimeDir string) (int, error)
	PersistProject(runtimeDir, project string) (int, error)
	CountArtifacts() (int, error)
	Close() error
}

// Syncer keeps runtime SQLite indexes on local disk while persisting copies in
// a durable artifact directory.
type Syncer struct {
	RuntimeDir  string
	ArtifactDir string
	backend     backend
}

// New validates and prepares a filesystem-backed cache syncer, creating both
// directories if absent.
func New(runtimeDir, artifactDir string) (*Syncer, error) {
	// Trim both inputs once up front (a duplicate TrimSpace call was removed).
	runtimeDir = strings.TrimSpace(runtimeDir)
	artifactDir = strings.TrimSpace(artifactDir)
	if runtimeDir == "" {
		return nil, fmt.Errorf("cachepersist: runtime dir is required")
	}
	if err := os.MkdirAll(runtimeDir, 0o750); err != nil {
		return nil, fmt.Errorf("cachepersist: create runtime dir: %w", err)
	}
	if artifactDir == "" {
		return nil, fmt.Errorf("cachepersist: artifact dir is required")
	}
	if err := os.MkdirAll(artifactDir, 0o750); err != nil {
		return nil, fmt.Errorf("cachepersist: create artifact dir: %w", err)
	}
	return &Syncer{
		RuntimeDir:  runtimeDir,
		ArtifactDir: artifactDir,
		backend:     &fsBackend{artifactDir: artifactDir},
	}, nil
}

// Hydrate restores persisted index artifacts into the local runtime cache.
// A nil Syncer (persistence disabled) is a safe no-op.
func (s *Syncer) Hydrate() (int, error) {
	if s == nil || s.backend == nil {
		return 0, nil
	}
	return s.backend.Hydrate(s.RuntimeDir)
}

// PersistProject persists one project's SQLite files into the artifact dir.
// A nil Syncer (persistence disabled) is a safe no-op.
func (s *Syncer) PersistProject(project string) (int, error) {
	if s == nil || s.backend == nil {
		return 0, nil
	}
	return s.backend.PersistProject(s.RuntimeDir, project)
}

// CountArtifacts returns the number of persisted DB artifact files.
func (s *Syncer) CountArtifacts() (int, error) {
	if s == nil || s.backend == nil {
		return 0, nil
	}
	return s.backend.CountArtifacts()
}

// Close releases any resources held by the syncer backend.
func (s *Syncer) Close() error {
	if s == nil || s.backend == nil {
		return nil
	}
	return s.backend.Close()
}

// listDBArtifacts returns the sorted DB artifact file names directly inside dir.
func listDBArtifacts(dir string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, fmt.Errorf("cachepersist: read dir %s: %w", dir, err)
	}
	files := make([]string, 0, len(entries))
	for _, entry := range entries {
		if entry.IsDir() || !isDBArtifact(entry.Name()) {
			continue
		}
		files = append(files, entry.Name())
	}
	sort.Strings(files)
	return files, nil
}

// isDBArtifact reports whether name is a persistable SQLite database file.
// Deliberately excludes -wal/-shm sidecars: only checkpointed .db files are
// safe to copy without the journal.
func isDBArtifact(name string) bool {
	return strings.HasSuffix(name, ".db")
}

// fsBackend persists artifacts to a local (or mounted) directory.
type fsBackend struct {
	artifactDir string
}

// Hydrate copies every persisted .db file into runtimeDir.
func (b *fsBackend) Hydrate(runtimeDir string) (int, error) {
	files, err := listDBArtifacts(b.artifactDir)
	if err != nil {
		return 0, err
	}
	copied := 0
	for _, name := range files {
		src := filepath.Join(b.artifactDir, name)
		dst := filepath.Join(runtimeDir, name)
		if err := copyFileAtomic(src, dst); err != nil {
			return copied, fmt.Errorf("cachepersist: hydrate %s: %w", name, err)
		}
		copied++
	}
	return copied, nil
}

// PersistProject copies "<project>.db*" files (filtered to .db artifacts)
// from runtimeDir into the artifact directory.
func (b *fsBackend) PersistProject(runtimeDir, project string) (int, error) {
	project = strings.TrimSpace(project)
	if project == "" {
		return 0, fmt.Errorf("cachepersist: project is required")
	}
	pattern := filepath.Join(runtimeDir, project+".db*")
	matches, err := filepath.Glob(pattern)
	if err != nil {
		return 0, fmt.Errorf("cachepersist: glob project artifacts: %w", err)
	}
	sort.Strings(matches)
	copied := 0
	for _, src := range matches {
		info, err := os.Stat(src)
		if err != nil {
			if os.IsNotExist(err) {
				// File vanished between glob and stat; not an error.
				continue
			}
			return copied, fmt.Errorf("cachepersist: stat %s: %w", src, err)
		}
		if info.IsDir() || !isDBArtifact(info.Name()) {
			continue
		}
		dst := filepath.Join(b.artifactDir, info.Name())
		if err := copyFileAtomic(src, dst); err != nil {
			return copied, fmt.Errorf("cachepersist: persist %s: %w", info.Name(), err)
		}
		copied++
	}
	return copied, nil
}

// CountArtifacts returns the number of persisted .db files.
func (b *fsBackend) CountArtifacts() (int, error) {
	files, err := listDBArtifacts(b.artifactDir)
	if err != nil {
		return 0, err
	}
	return len(files), nil
}

// Close is a no-op for the filesystem backend.
func (b *fsBackend) Close() error {
	return nil
}

// copyFileAtomic copies src to dst preserving src's mode, via a temp file
// and rename so readers never observe a partial file.
func copyFileAtomic(src, dst string) error {
	input, err := os.Open(src)
	if err != nil {
		return err
	}
	defer input.Close()

	info, err := input.Stat()
	if err != nil {
		return err
	}

	return copyReaderAtomic(input, dst, info.Mode())
}

// copyReaderAtomic streams input into dst with the given mode. The data is
// written to a temp file in dst's directory and renamed into place; the
// deferred Remove cleans up the temp file on any failure path (Rename makes
// the later Remove a harmless no-op on success).
func copyReaderAtomic(input io.Reader, dst string, mode os.FileMode) error {
	if err := os.MkdirAll(filepath.Dir(dst), 0o750); err != nil {
		return err
	}
	tmp, err := os.CreateTemp(filepath.Dir(dst), ".cachepersist-*")
	if err != nil {
		return err
	}
	tmpName := tmp.Name()
	defer func() {
		_ = tmp.Close()
		_ = os.Remove(tmpName)
	}()

	if _, err := io.Copy(tmp, input); err != nil {
		return err
	}
	if err := tmp.Chmod(mode); err != nil {
		return err
	}
	if err := tmp.Close(); err != nil {
		return err
	}
	if err := os.Rename(tmpName, dst); err != nil {
		return err
	}
	return nil
}
filepath.Join(artifactDir, "platform-backend.db"), "db") + writeFile(t, filepath.Join(artifactDir, "platform-backend.db-wal"), "wal") + writeFile(t, filepath.Join(artifactDir, "platform-backend.db-shm"), "shm") + writeFile(t, filepath.Join(artifactDir, "README.txt"), "ignore") + + syncer, err := New(runtimeDir, artifactDir) + if err != nil { + t.Fatalf("New: %v", err) + } + + copied, err := syncer.Hydrate() + if err != nil { + t.Fatalf("Hydrate: %v", err) + } + if copied != 1 { + t.Fatalf("copied: want 1, got %d", copied) + } + if _, err := os.Stat(filepath.Join(runtimeDir, "platform-backend.db")); err != nil { + t.Fatalf("runtime db missing: %v", err) + } + if _, err := os.Stat(filepath.Join(runtimeDir, "platform-backend.db-wal")); !os.IsNotExist(err) { + t.Fatalf("unexpected wal copied: %v", err) + } + if _, err := os.Stat(filepath.Join(runtimeDir, "platform-backend.db-shm")); !os.IsNotExist(err) { + t.Fatalf("unexpected shm copied: %v", err) + } + if _, err := os.Stat(filepath.Join(runtimeDir, "README.txt")); !os.IsNotExist(err) { + t.Fatalf("unexpected non-db file copied: %v", err) + } +} + +func TestPersistProjectCopiesMatchingArtifacts(t *testing.T) { + artifactDir := t.TempDir() + runtimeDir := t.TempDir() + + writeFile(t, filepath.Join(runtimeDir, "platform-backend.db"), "db") + writeFile(t, filepath.Join(runtimeDir, "platform-backend.db-wal"), "wal") + writeFile(t, filepath.Join(runtimeDir, "platform-backend.db-shm"), "shm") + writeFile(t, filepath.Join(runtimeDir, "other.db"), "other") + + syncer, err := New(runtimeDir, artifactDir) + if err != nil { + t.Fatalf("New: %v", err) + } + + copied, err := syncer.PersistProject("platform-backend") + if err != nil { + t.Fatalf("PersistProject: %v", err) + } + if copied != 1 { + t.Fatalf("copied: want 1, got %d", copied) + } + if _, err := os.Stat(filepath.Join(artifactDir, "platform-backend.db")); err != nil { + t.Fatalf("artifact db missing: %v", err) + } + if _, err := os.Stat(filepath.Join(artifactDir, 
"platform-backend.db-wal")); !os.IsNotExist(err) { + t.Fatalf("unexpected wal artifact copied: %v", err) + } + if _, err := os.Stat(filepath.Join(artifactDir, "platform-backend.db-shm")); !os.IsNotExist(err) { + t.Fatalf("unexpected shm artifact copied: %v", err) + } + if _, err := os.Stat(filepath.Join(artifactDir, "other.db")); !os.IsNotExist(err) { + t.Fatalf("unexpected unrelated artifact copied: %v", err) + } +} + +func TestCountArtifacts(t *testing.T) { + artifactDir := t.TempDir() + runtimeDir := t.TempDir() + + writeFile(t, filepath.Join(artifactDir, "a.db"), "a") + writeFile(t, filepath.Join(artifactDir, "a.db-wal"), "wal") + writeFile(t, filepath.Join(artifactDir, "a.db-shm"), "shm") + writeFile(t, filepath.Join(artifactDir, "notes.md"), "ignore") + + syncer, err := New(runtimeDir, artifactDir) + if err != nil { + t.Fatalf("New: %v", err) + } + + count, err := syncer.CountArtifacts() + if err != nil { + t.Fatalf("CountArtifacts: %v", err) + } + if count != 1 { + t.Fatalf("count: want 1, got %d", count) + } +} + +func writeFile(t *testing.T, path, content string) { + t.Helper() + if err := os.MkdirAll(filepath.Dir(path), 0o750); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(path, []byte(content), 0o640); err != nil { + t.Fatalf("write file: %v", err) + } +} diff --git a/ghl/internal/discovery/discovery.go b/ghl/internal/discovery/discovery.go new file mode 100644 index 00000000..3e8b39a3 --- /dev/null +++ b/ghl/internal/discovery/discovery.go @@ -0,0 +1,76 @@ +package discovery + +import ( + "context" +) + +// ToolDefinition describes the wrapper-owned discover_projects MCP tool. +type ToolDefinition struct { + Name string `json:"name"` + Description string `json:"description"` + InputSchema map[string]interface{} `json:"inputSchema"` +} + +// Candidate is a single repo candidate returned by discovery. 
+type Candidate struct { + Project string `json:"project"` + RepoSlug string `json:"repo_slug"` + Score float64 `json:"score,omitempty"` + Confidence string `json:"confidence,omitempty"` + Reasons []string `json:"reasons,omitempty"` +} + +// Request is the discover_projects tool input. +type Request struct { + Query string `json:"query"` + Limit int `json:"limit,omitempty"` + IncludeGraphConfidence bool `json:"include_graph_confidence,omitempty"` + IncludeSemantic bool `json:"include_semantic,omitempty"` +} + +// Response is the discover_projects tool output. +type Response struct { + Query string `json:"query"` + CrossRepo bool `json:"cross_repo,omitempty"` + PrimaryRepos []Candidate `json:"primary_repos,omitempty"` + RelatedRepos []Candidate `json:"related_repos,omitempty"` +} + +// Service executes wrapper-owned repo discovery. +type Service interface { + Definition() ToolDefinition + DiscoverProjects(ctx context.Context, req Request) (Response, error) +} + +// NewDefinition returns the canonical wrapper tool definition. 
+func NewDefinition() ToolDefinition { + return ToolDefinition{ + Name: "discover_projects", + Description: "Discover the most likely indexed repos for a task using metadata, code search, and graph evidence.", + InputSchema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "query": map[string]interface{}{ + "type": "string", + "description": "Task or feature description to map to indexed repositories.", + }, + "limit": map[string]interface{}{ + "type": "integer", + "default": 5, + "description": "Maximum number of candidate repositories to return.", + }, + "include_graph_confidence": map[string]interface{}{ + "type": "boolean", + "default": true, + "description": "When true, use graph-level architecture checks to refine confidence for top candidates.", + }, + "include_semantic": map[string]interface{}{ + "type": "boolean", + "default": false, + "description": "When true, optionally use semantic vector hits where available as positive evidence.", + }, + }, + "required": []string{"query"}, + }, + } +} diff --git a/ghl/internal/discovery/discovery_test.go b/ghl/internal/discovery/discovery_test.go new file mode 100644 index 00000000..025d93b3 --- /dev/null +++ b/ghl/internal/discovery/discovery_test.go @@ -0,0 +1,314 @@ +package discovery + +import ( + "context" + "encoding/json" + "testing" + + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/manifest" + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/mcp" +) + +type fakeToolCaller struct { + tools map[string]func(params map[string]interface{}) *mcp.ToolResult +} + +func (f *fakeToolCaller) CallTool(_ context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) { + if fn, ok := f.tools[name]; ok { + return fn(params), nil + } + return &mcp.ToolResult{}, nil +} + +func jsonToolResult(t *testing.T, payload interface{}) *mcp.ToolResult { + t.Helper() + raw, err := json.Marshal(payload) + if err != nil { + t.Fatalf("marshal payload: %v", err) 
+ } + return &mcp.ToolResult{ + Content: []mcp.Content{{Type: "text", Text: string(raw)}}, + } +} + +func TestDiscoverProjectsNormalizesCatalogFromRootPath(t *testing.T) { + svc := NewService(&fakeToolCaller{ + tools: map[string]func(map[string]interface{}) *mcp.ToolResult{ + "list_projects": func(params map[string]interface{}) *mcp.ToolResult { + return jsonToolResult(t, map[string]interface{}{ + "projects": []map[string]interface{}{ + { + "name": "app-fleet-cache-membership-backend", + "root_path": "/app/fleet-cache/membership-backend", + "nodes": 5942, + "edges": 11602, + }, + }, + }) + }, + }, + }, manifest.Manifest{ + Repos: []manifest.Repo{ + {Name: "membership-backend", Team: "revex", Type: "service", Tags: []string{"membership", "checkout"}}, + }, + }, Options{}) + + catalog, err := svc.refreshCatalog(context.Background()) + if err != nil { + t.Fatalf("refreshCatalog: %v", err) + } + if len(catalog) != 1 { + t.Fatalf("catalog size: want 1, got %d", len(catalog)) + } + if catalog[0].RepoSlug != "membership-backend" { + t.Fatalf("repo slug: want membership-backend, got %q", catalog[0].RepoSlug) + } + if catalog[0].Team != "revex" { + t.Fatalf("team: want revex, got %q", catalog[0].Team) + } +} + +func TestDiscoverProjectsRanksByMetadataAndBM25(t *testing.T) { + svc := NewService(&fakeToolCaller{ + tools: map[string]func(map[string]interface{}) *mcp.ToolResult{ + "list_projects": func(params map[string]interface{}) *mcp.ToolResult { + return jsonToolResult(t, map[string]interface{}{ + "projects": []map[string]interface{}{ + { + "name": "app-fleet-cache-membership-backend", + "root_path": "/app/fleet-cache/membership-backend", + "nodes": 5942, + "edges": 11602, + }, + { + "name": "app-fleet-cache-ghl-membership-frontend", + "root_path": "/app/fleet-cache/ghl-membership-frontend", + "nodes": 10287, + "edges": 15213, + }, + }, + }) + }, + "search_graph": func(params map[string]interface{}) *mcp.ToolResult { + project, _ := params["project"].(string) + switch 
project { + case "app-fleet-cache-membership-backend": + return jsonToolResult(t, map[string]interface{}{ + "total": 4, + "results": []map[string]interface{}{ + {"label": "Function", "name": "acquireCheckoutLock", "rank": -14.0}, + }, + }) + case "app-fleet-cache-ghl-membership-frontend": + return jsonToolResult(t, map[string]interface{}{ + "total": 1, + "results": []map[string]interface{}{ + {"label": "Component", "name": "CheckoutPage", "rank": -2.0}, + }, + }) + default: + return jsonToolResult(t, map[string]interface{}{"total": 0, "results": []map[string]interface{}{}}) + } + }, + "get_architecture": func(params map[string]interface{}) *mcp.ToolResult { + project, _ := params["project"].(string) + if project == "app-fleet-cache-membership-backend" { + return jsonToolResult(t, map[string]interface{}{ + "project": project, + "total_nodes": 5942, + "total_edges": 11602, + "node_labels": []map[string]interface{}{{"label": "Function", "count": 600}}, + "edge_types": []map[string]interface{}{{"type": "CALLS", "count": 1800}}, + }) + } + return jsonToolResult(t, map[string]interface{}{ + "project": project, + "total_nodes": 10287, + "total_edges": 15213, + "node_labels": []map[string]interface{}{{"label": "Component", "count": 420}}, + "edge_types": []map[string]interface{}{{"type": "IMPORTS", "count": 2000}}, + }) + }, + }, + }, manifest.Manifest{ + Repos: []manifest.Repo{ + {Name: "membership-backend", Team: "revex", Type: "service", Tags: []string{"membership", "checkout", "contact"}}, + {Name: "ghl-membership-frontend", Team: "revex", Type: "frontend", Tags: []string{"membership", "checkout"}}, + }, + }, Options{MaxBM25Candidates: 5, MaxGraphCandidates: 3}) + + resp, err := svc.DiscoverProjects(context.Background(), Request{ + Query: "add lock in membership checkout flow for contact purchases", + Limit: 5, + IncludeGraphConfidence: true, + }) + if err != nil { + t.Fatalf("DiscoverProjects: %v", err) + } + if len(resp.PrimaryRepos) == 0 { + t.Fatal("expected at 
least one primary repo") + } + if got := resp.PrimaryRepos[0].RepoSlug; got != "membership-backend" { + t.Fatalf("top repo: want membership-backend, got %q", got) + } +} + +func TestDiscoverProjectsPenalizesPlaceholderIndexes(t *testing.T) { + svc := NewService(&fakeToolCaller{ + tools: map[string]func(map[string]interface{}) *mcp.ToolResult{ + "list_projects": func(params map[string]interface{}) *mcp.ToolResult { + return jsonToolResult(t, map[string]interface{}{ + "projects": []map[string]interface{}{ + { + "name": "app-fleet-cache-membership-backend", + "root_path": "/app/fleet-cache/membership-backend", + "nodes": 1, + "edges": 0, + }, + { + "name": "app-fleet-cache-ghl-membership-frontend", + "root_path": "/app/fleet-cache/ghl-membership-frontend", + "nodes": 1200, + "edges": 2400, + }, + }, + }) + }, + "search_graph": func(params map[string]interface{}) *mcp.ToolResult { + project, _ := params["project"].(string) + if project == "app-fleet-cache-membership-backend" { + return jsonToolResult(t, map[string]interface{}{ + "total": 3, + "results": []map[string]interface{}{ + {"label": "Function", "name": "fakeMatch", "rank": -12.0}, + }, + }) + } + return jsonToolResult(t, map[string]interface{}{ + "total": 2, + "results": []map[string]interface{}{ + {"label": "Component", "name": "CheckoutPage", "rank": -5.0}, + }, + }) + }, + "get_architecture": func(params map[string]interface{}) *mcp.ToolResult { + project, _ := params["project"].(string) + if project == "app-fleet-cache-membership-backend" { + return jsonToolResult(t, map[string]interface{}{ + "project": project, + "total_nodes": 1, + "total_edges": 0, + }) + } + return jsonToolResult(t, map[string]interface{}{ + "project": project, + "total_nodes": 1200, + "total_edges": 2400, + }) + }, + }, + }, manifest.Manifest{ + Repos: []manifest.Repo{ + {Name: "membership-backend", Team: "revex", Type: "service", Tags: []string{"membership", "checkout"}}, + {Name: "ghl-membership-frontend", Team: "revex", Type: 
"frontend", Tags: []string{"membership", "checkout"}}, + }, + }, Options{MaxBM25Candidates: 5, MaxGraphCandidates: 3}) + + resp, err := svc.DiscoverProjects(context.Background(), Request{ + Query: "membership checkout", + Limit: 5, + IncludeGraphConfidence: true, + }) + if err != nil { + t.Fatalf("DiscoverProjects: %v", err) + } + if len(resp.PrimaryRepos) == 0 { + t.Fatal("expected at least one primary repo") + } + if got := resp.PrimaryRepos[0].RepoSlug; got != "ghl-membership-frontend" { + t.Fatalf("top repo after placeholder penalty: want ghl-membership-frontend, got %q", got) + } +} + +func TestDiscoverProjectsReturnsCrossRepoCandidates(t *testing.T) { + svc := NewService(&fakeToolCaller{ + tools: map[string]func(map[string]interface{}) *mcp.ToolResult{ + "list_projects": func(params map[string]interface{}) *mcp.ToolResult { + return jsonToolResult(t, map[string]interface{}{ + "projects": []map[string]interface{}{ + { + "name": "app-fleet-cache-membership-backend", + "root_path": "/app/fleet-cache/membership-backend", + "nodes": 5942, + "edges": 11602, + }, + { + "name": "app-fleet-cache-ghl-membership-frontend", + "root_path": "/app/fleet-cache/ghl-membership-frontend", + "nodes": 10287, + "edges": 15213, + }, + }, + }) + }, + "search_graph": func(params map[string]interface{}) *mcp.ToolResult { + project, _ := params["project"].(string) + switch project { + case "app-fleet-cache-membership-backend": + return jsonToolResult(t, map[string]interface{}{ + "total": 3, + "results": []map[string]interface{}{ + {"label": "Function", "name": "checkoutContactLock", "rank": -10.0}, + }, + }) + case "app-fleet-cache-ghl-membership-frontend": + return jsonToolResult(t, map[string]interface{}{ + "total": 3, + "results": []map[string]interface{}{ + {"label": "Component", "name": "CheckoutLockBanner", "rank": -9.0}, + }, + }) + default: + return jsonToolResult(t, map[string]interface{}{"total": 0, "results": []map[string]interface{}{}}) + } + }, + "get_architecture": 
func(params map[string]interface{}) *mcp.ToolResult { + project, _ := params["project"].(string) + if project == "app-fleet-cache-membership-backend" { + return jsonToolResult(t, map[string]interface{}{ + "project": project, + "total_nodes": 5942, + "total_edges": 11602, + "node_labels": []map[string]interface{}{{"label": "Function", "count": 600}}, + }) + } + return jsonToolResult(t, map[string]interface{}{ + "project": project, + "total_nodes": 10287, + "total_edges": 15213, + "node_labels": []map[string]interface{}{{"label": "Component", "count": 420}}, + }) + }, + }, + }, manifest.Manifest{ + Repos: []manifest.Repo{ + {Name: "membership-backend", Team: "revex", Type: "service", Tags: []string{"membership", "checkout", "contact"}}, + {Name: "ghl-membership-frontend", Team: "revex", Type: "frontend", Tags: []string{"membership", "checkout", "ui"}}, + }, + }, Options{MaxBM25Candidates: 5, MaxGraphCandidates: 3}) + + resp, err := svc.DiscoverProjects(context.Background(), Request{ + Query: "add checkout lock ui and backend validation for membership contact purchases", + Limit: 5, + IncludeGraphConfidence: true, + }) + if err != nil { + t.Fatalf("DiscoverProjects: %v", err) + } + if !resp.CrossRepo { + t.Fatal("expected cross_repo=true") + } + if len(resp.PrimaryRepos)+len(resp.RelatedRepos) < 2 { + t.Fatalf("expected at least two repos, got primary=%d related=%d", len(resp.PrimaryRepos), len(resp.RelatedRepos)) + } +} diff --git a/ghl/internal/discovery/service.go b/ghl/internal/discovery/service.go new file mode 100644 index 00000000..67205afc --- /dev/null +++ b/ghl/internal/discovery/service.go @@ -0,0 +1,586 @@ +package discovery + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "math" + "path/filepath" + "sort" + "strings" + "sync" + "time" + + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/manifest" + "github.com/GoHighLevel/codebase-memory-mcp/ghl/internal/mcp" +) + +// ToolCaller is the subset of MCP client behavior discovery needs. 
+type ToolCaller interface { + CallTool(ctx context.Context, name string, params map[string]interface{}) (*mcp.ToolResult, error) +} + +// Options tunes candidate narrowing and scoring depth. +type Options struct { + MaxBM25Candidates int + MaxGraphCandidates int + RequestTimeout time.Duration +} + +type indexedProject struct { + Name string `json:"name"` + RootPath string `json:"root_path"` + Nodes int `json:"nodes"` + Edges int `json:"edges"` +} + +type listProjectsPayload struct { + Projects []indexedProject `json:"projects"` +} + +type searchGraphPayload struct { + Total int `json:"total"` + Results []searchGraphHit `json:"results"` + SemanticResults []semanticGraphHit `json:"semantic_results"` +} + +type searchGraphHit struct { + Name string `json:"name"` + QualifiedName string `json:"qualified_name"` + Label string `json:"label"` + FilePath string `json:"file_path"` + Rank float64 `json:"rank"` +} + +type semanticGraphHit struct { + Name string `json:"name"` + QualifiedName string `json:"qualified_name"` + Label string `json:"label"` + FilePath string `json:"file_path"` + Score float64 `json:"score"` +} + +type architecturePayload struct { + Project string `json:"project"` + TotalNodes int `json:"total_nodes"` + TotalEdges int `json:"total_edges"` + NodeLabels []labelStat `json:"node_labels"` +} + +type labelStat struct { + Label string `json:"label"` + Count int `json:"count"` +} + +type catalogEntry struct { + Project string + RepoSlug string + RootPath string + Nodes int + Edges int + Team string + Type string + Tags []string +} + +type candidateScore struct { + Candidate + indexed catalogEntry +} + +// Discoverer implements the discovery Service. +type Discoverer struct { + caller ToolCaller + manifest manifest.Manifest + opts Options + + mu sync.RWMutex + catalog []catalogEntry +} + +// NewService constructs a discoverer with sane defaults. 
+func NewService(caller ToolCaller, m manifest.Manifest, opts Options) *Discoverer { + if opts.MaxBM25Candidates <= 0 { + opts.MaxBM25Candidates = 5 + } + if opts.MaxGraphCandidates <= 0 { + opts.MaxGraphCandidates = 3 + } + if opts.RequestTimeout <= 0 { + opts.RequestTimeout = 5 * time.Second + } + return &Discoverer{ + caller: caller, + manifest: m, + opts: opts, + } +} + +func (d *Discoverer) Definition() ToolDefinition { + return NewDefinition() +} + +// Invalidate clears the in-memory project catalog so the next request refreshes it. +func (d *Discoverer) Invalidate() { + d.mu.Lock() + defer d.mu.Unlock() + d.catalog = nil +} + +func (d *Discoverer) DiscoverProjects(ctx context.Context, req Request) (Response, error) { + if strings.TrimSpace(req.Query) == "" { + return Response{}, errors.New("query is required") + } + if req.Limit <= 0 { + req.Limit = 5 + } + + if _, ok := ctx.Deadline(); !ok { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, d.opts.RequestTimeout) + defer cancel() + } + + catalog, err := d.ensureCatalog(ctx) + if err != nil { + return Response{}, err + } + if len(catalog) == 0 { + return Response{Query: req.Query}, nil + } + + queryTokens := tokenize(req.Query) + candidates := d.initialCandidates(req.Query, queryTokens, catalog) + if len(candidates) == 0 { + return Response{Query: req.Query}, nil + } + + if err := d.applyBM25Scores(ctx, req, queryTokens, candidates); err != nil { + return Response{}, err + } + if req.IncludeGraphConfidence { + if err := d.applyGraphConfidence(ctx, candidates); err != nil { + return Response{}, err + } + } + + sort.SliceStable(candidates, func(i, j int) bool { + if candidates[i].Score == candidates[j].Score { + return candidates[i].RepoSlug < candidates[j].RepoSlug + } + return candidates[i].Score > candidates[j].Score + }) + + resp := Response{Query: req.Query} + topScore := candidates[0].Score + primaryCutoff := math.Max(0.55, topScore-0.12) + for _, cand := range candidates { + 
cand.Confidence = confidenceFromScore(cand.Score) + if len(resp.PrimaryRepos) == 0 || (cand.Score >= primaryCutoff && len(resp.PrimaryRepos) < min(req.Limit, 3)) { + resp.PrimaryRepos = append(resp.PrimaryRepos, cand.Candidate) + continue + } + if cand.Score >= 0.30 && len(resp.PrimaryRepos)+len(resp.RelatedRepos) < req.Limit { + resp.RelatedRepos = append(resp.RelatedRepos, cand.Candidate) + } + } + resp.CrossRepo = len(resp.PrimaryRepos)+len(resp.RelatedRepos) > 1 + return resp, nil +} + +func (d *Discoverer) ensureCatalog(ctx context.Context) ([]catalogEntry, error) { + d.mu.RLock() + if d.catalog != nil { + cached := append([]catalogEntry(nil), d.catalog...) + d.mu.RUnlock() + return cached, nil + } + d.mu.RUnlock() + return d.refreshCatalog(ctx) +} + +func (d *Discoverer) refreshCatalog(ctx context.Context) ([]catalogEntry, error) { + result, err := d.caller.CallTool(ctx, "list_projects", nil) + if err != nil { + return nil, fmt.Errorf("list_projects: %w", err) + } + + var payload listProjectsPayload + if err := decodeToolPayload(result, &payload); err != nil { + return nil, fmt.Errorf("decode list_projects: %w", err) + } + + manifestByName := make(map[string]manifest.Repo, len(d.manifest.Repos)) + for _, repo := range d.manifest.Repos { + manifestByName[strings.ToLower(repo.Name)] = repo + } + + catalog := make([]catalogEntry, 0, len(payload.Projects)) + for _, project := range payload.Projects { + slug := deriveRepoSlug(project.Name, project.RootPath, manifestByName) + entry := catalogEntry{ + Project: project.Name, + RepoSlug: slug, + RootPath: project.RootPath, + Nodes: project.Nodes, + Edges: project.Edges, + } + if repo, ok := manifestByName[strings.ToLower(slug)]; ok { + entry.Team = repo.Team + entry.Type = repo.Type + entry.Tags = append([]string(nil), repo.Tags...) + } + catalog = append(catalog, entry) + } + + d.mu.Lock() + d.catalog = append([]catalogEntry(nil), catalog...) 
+ d.mu.Unlock() + return catalog, nil +} + +func deriveRepoSlug(projectName, rootPath string, manifestByName map[string]manifest.Repo) string { + if base := strings.TrimSpace(filepath.Base(rootPath)); base != "" && base != "." && base != string(filepath.Separator) { + return base + } + lowerProject := strings.ToLower(projectName) + if _, ok := manifestByName[lowerProject]; ok { + return projectName + } + prefixes := []string{ + "app-fleet-cache-", + "data-fleet-cache-", + "tmp-fleet-cache-", + "fleet-cache-", + } + for _, prefix := range prefixes { + if strings.HasPrefix(lowerProject, prefix) { + return projectName[len(prefix):] + } + } + return projectName +} + +func (d *Discoverer) initialCandidates(query string, queryTokens []string, catalog []catalogEntry) []candidateScore { + candidates := make([]candidateScore, 0, len(catalog)) + for _, entry := range catalog { + score, reasons := metadataScore(query, queryTokens, entry) + candidates = append(candidates, candidateScore{ + Candidate: Candidate{ + Project: entry.Project, + RepoSlug: entry.RepoSlug, + Score: score, + Reasons: reasons, + }, + indexed: entry, + }) + } + + sort.SliceStable(candidates, func(i, j int) bool { + if candidates[i].Score == candidates[j].Score { + return healthScore(candidates[i].indexed) > healthScore(candidates[j].indexed) + } + return candidates[i].Score > candidates[j].Score + }) + + limit := min(len(candidates), d.opts.MaxBM25Candidates) + if limit == 0 { + return nil + } + + selected := append([]candidateScore(nil), candidates[:limit]...) + allZero := true + for _, candidate := range selected { + if candidate.Score > 0 { + allZero = false + break + } + } + if allZero { + sort.SliceStable(candidates, func(i, j int) bool { + return healthScore(candidates[i].indexed) > healthScore(candidates[j].indexed) + }) + selected = append([]candidateScore(nil), candidates[:limit]...) 
+ } + return selected +} + +func metadataScore(query string, queryTokens []string, entry catalogEntry) (float64, []string) { + var score float64 + var reasons []string + + lowerQuery := strings.ToLower(query) + lowerSlug := strings.ToLower(entry.RepoSlug) + if lowerSlug != "" && strings.Contains(lowerQuery, lowerSlug) { + score += 0.35 + reasons = append(reasons, "repo slug appears directly in task") + } + + slugTokens := tokenSet(tokenize(lowerSlug)) + tagTokens := tokenSet(entry.Tags) + for _, token := range queryTokens { + if _, ok := slugTokens[token]; ok { + score += 0.12 + reasons = append(reasons, fmt.Sprintf("name token match: %s", token)) + continue + } + if _, ok := tagTokens[token]; ok { + score += 0.08 + reasons = append(reasons, fmt.Sprintf("tag match: %s", token)) + continue + } + if token == strings.ToLower(entry.Team) || token == strings.ToLower(entry.Type) { + score += 0.04 + reasons = append(reasons, fmt.Sprintf("metadata match: %s", token)) + } + } + + if entry.Nodes > 0 && entry.Edges > 0 { + score += 0.03 + } + if entry.Nodes <= 1 || entry.Edges == 0 { + score -= 0.15 + reasons = append(reasons, "indexed project is shallow") + } + + return clamp(score, 0, 0.75), dedupeStrings(reasons) +} + +func (d *Discoverer) applyBM25Scores(ctx context.Context, req Request, queryTokens []string, candidates []candidateScore) error { + for i := range candidates { + args := map[string]interface{}{ + "project": candidates[i].Project, + "query": req.Query, + "limit": 8, + } + if req.IncludeSemantic { + if semanticKeywords := semanticKeywords(queryTokens); len(semanticKeywords) > 0 { + args["semantic_query"] = semanticKeywords + } + } + + result, err := d.caller.CallTool(ctx, "search_graph", args) + if err != nil { + return fmt.Errorf("search_graph %s: %w", candidates[i].Project, err) + } + + var payload searchGraphPayload + if err := decodeToolPayload(result, &payload); err != nil { + return fmt.Errorf("decode search_graph %s: %w", candidates[i].Project, err) + } 
+ + add, reasons := bm25Score(payload) + candidates[i].Score = clamp(candidates[i].Score+add, 0, 1.0) + candidates[i].Reasons = dedupeStrings(append(candidates[i].Reasons, reasons...)) + + if req.IncludeSemantic { + semAdd, semReasons := semanticScore(payload) + candidates[i].Score = clamp(candidates[i].Score+semAdd, 0, 1.0) + candidates[i].Reasons = dedupeStrings(append(candidates[i].Reasons, semReasons...)) + } + } + return nil +} + +func bm25Score(payload searchGraphPayload) (float64, []string) { + if payload.Total <= 0 || len(payload.Results) == 0 { + return 0, []string{"no BM25 code hits"} + } + + score := math.Min(float64(payload.Total), 8) / 8 * 0.30 + best := payload.Results[0] + score += labelWeight(best.Label) + + reasons := []string{ + fmt.Sprintf("BM25 hit count: %d", payload.Total), + fmt.Sprintf("top hit label: %s", best.Label), + } + return clamp(score, 0, 0.50), reasons +} + +func semanticScore(payload searchGraphPayload) (float64, []string) { + if len(payload.SemanticResults) == 0 { + return 0, nil + } + + best := payload.SemanticResults[0].Score + score := clamp(best*0.08, 0, 0.08) + reasons := []string{fmt.Sprintf("semantic hits: %d", len(payload.SemanticResults))} + return score, reasons +} + +func (d *Discoverer) applyGraphConfidence(ctx context.Context, candidates []candidateScore) error { + sort.SliceStable(candidates, func(i, j int) bool { return candidates[i].Score > candidates[j].Score }) + + limit := min(len(candidates), d.opts.MaxGraphCandidates) + for i := 0; i < limit; i++ { + result, err := d.caller.CallTool(ctx, "get_architecture", map[string]interface{}{ + "project": candidates[i].Project, + }) + if err != nil { + return fmt.Errorf("get_architecture %s: %w", candidates[i].Project, err) + } + + var payload architecturePayload + if err := decodeToolPayload(result, &payload); err != nil { + return fmt.Errorf("decode get_architecture %s: %w", candidates[i].Project, err) + } + + add, reasons := graphConfidenceScore(payload) + 
candidates[i].Score = clamp(candidates[i].Score+add, 0, 1.0) + candidates[i].Reasons = dedupeStrings(append(candidates[i].Reasons, reasons...)) + } + return nil +} + +func graphConfidenceScore(payload architecturePayload) (float64, []string) { + if payload.TotalNodes <= 1 || payload.TotalEdges == 0 { + return -0.40, []string{"graph confidence penalty: project-only or placeholder index"} + } + + score := 0.0 + reasons := []string{ + fmt.Sprintf("graph depth: %d nodes / %d edges", payload.TotalNodes, payload.TotalEdges), + } + + if payload.TotalNodes > 100 && payload.TotalEdges > 100 { + score += 0.10 + } + + for _, label := range payload.NodeLabels { + switch label.Label { + case "Function", "Method", "Route", "Class", "Component": + if label.Count > 0 { + score += 0.05 + reasons = append(reasons, fmt.Sprintf("architecture contains %s nodes", label.Label)) + return clamp(score, -0.40, 0.15), dedupeStrings(reasons) + } + } + } + return clamp(score, -0.40, 0.15), dedupeStrings(reasons) +} + +func decodeToolPayload(result *mcp.ToolResult, out interface{}) error { + if result == nil { + return errors.New("missing tool result") + } + if result.IsError { + msg := "tool returned error" + if len(result.Content) > 0 { + msg = result.Content[0].Text + } + return errors.New(msg) + } + for _, item := range result.Content { + if item.Type != "text" || strings.TrimSpace(item.Text) == "" { + continue + } + return json.Unmarshal([]byte(item.Text), out) + } + return errors.New("missing JSON text content") +} + +func tokenize(input string) []string { + replacer := strings.NewReplacer("-", " ", "_", " ", "/", " ", ".", " ", ":", " ") + normalized := strings.ToLower(replacer.Replace(input)) + fields := strings.Fields(normalized) + tokens := make([]string, 0, len(fields)) + for _, field := range fields { + field = strings.TrimSpace(field) + if field == "" { + continue + } + tokens = append(tokens, field) + } + return dedupeStrings(tokens) +} + +func semanticKeywords(tokens []string) 
[]string { + stop := map[string]struct{}{ + "add": {}, "for": {}, "the": {}, "and": {}, "flow": {}, "in": {}, "a": {}, "an": {}, + } + out := make([]string, 0, len(tokens)) + for _, token := range tokens { + if _, ok := stop[token]; ok { + continue + } + out = append(out, token) + if len(out) == 5 { + break + } + } + return out +} + +func tokenSet(tokens []string) map[string]struct{} { + set := make(map[string]struct{}, len(tokens)) + for _, token := range tokens { + token = strings.ToLower(strings.TrimSpace(token)) + if token == "" { + continue + } + set[token] = struct{}{} + } + return set +} + +func labelWeight(label string) float64 { + switch label { + case "Function", "Method": + return 0.15 + case "Route": + return 0.13 + case "Class", "Interface", "Type", "Enum": + return 0.10 + case "Component": + return 0.08 + default: + return 0.03 + } +} + +func healthScore(entry catalogEntry) int { + return entry.Nodes + entry.Edges +} + +func confidenceFromScore(score float64) string { + switch { + case score >= 0.75: + return "high" + case score >= 0.50: + return "medium" + default: + return "low" + } +} + +func dedupeStrings(values []string) []string { + seen := make(map[string]struct{}, len(values)) + out := make([]string, 0, len(values)) + for _, value := range values { + if _, ok := seen[value]; ok { + continue + } + seen[value] = struct{}{} + out = append(out, value) + } + return out +} + +func clamp(value, minValue, maxValue float64) float64 { + if value < minValue { + return minValue + } + if value > maxValue { + return maxValue + } + return value +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/ghl/internal/enricher/consumer_cascade.go b/ghl/internal/enricher/consumer_cascade.go new file mode 100644 index 00000000..a766b62e --- /dev/null +++ b/ghl/internal/enricher/consumer_cascade.go @@ -0,0 +1,163 @@ +package enricher + +import ( + "regexp" + "strings" +) + +// SideEffectKind classifies what a consumer worker produces for 
users. +type SideEffectKind string + +const ( + SideEffectEmail SideEffectKind = "email" + SideEffectDrip SideEffectKind = "drip_sequence" + SideEffectWebhook SideEffectKind = "webhook" + SideEffectAnalytics SideEffectKind = "analytics" + SideEffectAccessGrant SideEffectKind = "access_grant" + SideEffectNotification SideEffectKind = "notification" + SideEffectUnknown SideEffectKind = "unknown" +) + +// ConsumerSideEffect describes one side effect a consumer worker produces. +type ConsumerSideEffect struct { + Kind SideEffectKind + Description string + IsSilent bool // workers run in background — user sees no error + Severity string // "HIGH" | "MEDIUM" | "LOW" +} + +// ConsumerCascadeResult describes the full cascade impact of a changed consumer file. +type ConsumerCascadeResult struct { + Topic string + Role string + FilePath string + SideEffects []ConsumerSideEffect + MaxSeverity string + UserImpactSummary string +} + +// sideEffectRule matches source patterns to classify worker side effects. 
+type sideEffectRule struct { + Kind SideEffectKind + Pattern *regexp.Regexp + Description string + Severity string +} + +var sideEffectRules = []sideEffectRule{ + { + SideEffectAccessGrant, + regexp.MustCompile(`grantAccess|AccessGrant|membershipAccess|grantMembership|accessGrantService`), + "User access grant broken — purchased product not accessible", + "HIGH", + }, + { + SideEffectEmail, + regexp.MustCompile(`sendEmail|sendMail|mailerService|MailerService|nodemailer|@nestjs/mailer`), + "Welcome/confirmation emails not sent to user", + "MEDIUM", + }, + { + SideEffectDrip, + regexp.MustCompile(`dripService|DripService|addToDrip|createDrip|drip.*sequence`), + "Drip sequences not started for user", + "MEDIUM", + }, + { + SideEffectWebhook, + regexp.MustCompile(`webhookService|WebhookService|triggerWebhook|sendWebhook|externalTrigger|ExternalTrigger`), + "External webhook triggers not fired", + "MEDIUM", + }, + { + SideEffectNotification, + regexp.MustCompile(`notificationService|NotificationService|pushNotification|fcmService|sendPush`), + "Push notifications not sent", + "MEDIUM", + }, + { + SideEffectAnalytics, + regexp.MustCompile(`analyticsService|AnalyticsService|\.track\(|segment\.track|mixpanel|amplitude`), + "Analytics events not tracked", + "LOW", + }, +} + +// ExtractConsumerSideEffects analyzes a consumer worker's source code to +// determine what side effects it produces. Returns nil for empty source. +func ExtractConsumerSideEffects(source string) []ConsumerSideEffect { + if strings.TrimSpace(source) == "" { + return nil + } + var effects []ConsumerSideEffect + for _, rule := range sideEffectRules { + if rule.Pattern.MatchString(source) { + effects = append(effects, ConsumerSideEffect{ + Kind: rule.Kind, + Description: rule.Description, + IsSilent: true, + Severity: rule.Severity, + }) + } + } + return effects +} + +// ResolveConsumerCascade resolves downstream side effects for consumer events +// found in a changed file. Producer events are skipped. 
+// Returns nil when no consumer events produce detectable side effects. +func ResolveConsumerCascade(consumerEvents []EventPatternCall, source string, topicReg *TopicRegistry) []ConsumerCascadeResult { + var results []ConsumerCascadeResult + for _, ev := range consumerEvents { + if ev.Role != "consumer" { + continue + } + effects := ExtractConsumerSideEffects(source) + if len(effects) == 0 { + continue + } + + maxSev := "LOW" + for _, e := range effects { + switch e.Severity { + case "HIGH": + maxSev = "HIGH" + case "MEDIUM": + if maxSev == "LOW" { + maxSev = "MEDIUM" + } + } + } + + var descs []string + for _, e := range effects { + descs = append(descs, e.Description) + } + summary := strings.Join(descs, "; ") + + // Augment with topic registry user_impact if available. + if topicReg != nil { + if impact := topicReg.LookupByTopicID(ev.Topic); impact != nil { + for _, pa := range impact.ProductAreas { + if pa.UserImpact != "" { + summary = pa.UserImpact + " (" + summary + ")" + break + } + } + } + } + + results = append(results, ConsumerCascadeResult{ + Topic: ev.Topic, + Role: ev.Role, + FilePath: ev.FilePath, + SideEffects: effects, + MaxSeverity: maxSev, + UserImpactSummary: summary, + }) + } + if len(results) == 0 { + return nil + } + return results +} diff --git a/ghl/internal/enricher/consumer_cascade_test.go b/ghl/internal/enricher/consumer_cascade_test.go new file mode 100644 index 00000000..248598e5 --- /dev/null +++ b/ghl/internal/enricher/consumer_cascade_test.go @@ -0,0 +1,159 @@ +package enricher + +import ( + "testing" +) + +func TestExtractConsumerSideEffects_EmptySource_ReturnsNil(t *testing.T) { + if got := ExtractConsumerSideEffects(""); got != nil { + t.Errorf("expected nil, got %v", got) + } +} + +func TestExtractConsumerSideEffects_DetectsEmail(t *testing.T) { + src := `await this.mailerService.sendMail({to: user.email})` + got := ExtractConsumerSideEffects(src) + if len(got) != 1 || got[0].Kind != SideEffectEmail { + t.Errorf("got %+v", got) 
+ } + if !got[0].IsSilent { + t.Errorf("IsSilent should be true for worker side effects") + } +} + +func TestExtractConsumerSideEffects_DetectsWebhook(t *testing.T) { + src := `await this.webhookService.triggerWebhook(payload)` + got := ExtractConsumerSideEffects(src) + if len(got) != 1 || got[0].Kind != SideEffectWebhook { + t.Errorf("got %+v", got) + } +} + +func TestExtractConsumerSideEffects_DetectsDrip(t *testing.T) { + src := `await this.dripService.addToDrip(contactId, sequence)` + got := ExtractConsumerSideEffects(src) + if len(got) == 0 || got[0].Kind != SideEffectDrip { + t.Errorf("got %+v", got) + } +} + +func TestExtractConsumerSideEffects_DetectsAccessGrant(t *testing.T) { + src := `await this.accessGrantService.grantAccess(memberId, offerId)` + got := ExtractConsumerSideEffects(src) + if len(got) == 0 { + t.Fatal("no side effects detected") + } + found := false + for _, e := range got { + if e.Kind == SideEffectAccessGrant && e.Severity == "HIGH" { + found = true + } + } + if !found { + t.Errorf("expected HIGH-severity access_grant, got %+v", got) + } +} + +func TestExtractConsumerSideEffects_DetectsAnalytics(t *testing.T) { + src := `this.analyticsService.track('checkout.completed', payload)` + got := ExtractConsumerSideEffects(src) + if len(got) == 0 { + t.Fatal("no side effects detected") + } + found := false + for _, e := range got { + if e.Kind == SideEffectAnalytics && e.Severity == "LOW" { + found = true + } + } + if !found { + t.Errorf("expected LOW-severity analytics, got %+v", got) + } +} + +func TestExtractConsumerSideEffects_DetectsMultiple(t *testing.T) { + src := ` + await this.mailerService.sendMail(...) + await this.dripService.createDrip(...) + await this.webhookService.triggerWebhook(...) 
+ ` + got := ExtractConsumerSideEffects(src) + if len(got) < 3 { + t.Errorf("expected ≥3 side effects, got %d", len(got)) + } +} + +func TestResolveConsumerCascade_ProducerEventsSkipped(t *testing.T) { + events := []EventPatternCall{{Topic: "X", Role: "producer"}} + got := ResolveConsumerCascade(events, "this.dripService.addToDrip()", nil) + if got != nil { + t.Errorf("expected nil for producer-only events, got %v", got) + } +} + +func TestResolveConsumerCascade_EmptyEvents_ReturnsNil(t *testing.T) { + if got := ResolveConsumerCascade(nil, "source", nil); got != nil { + t.Errorf("expected nil, got %v", got) + } +} + +func TestResolveConsumerCascade_ConsumerWithSideEffects(t *testing.T) { + events := []EventPatternCall{{Topic: "CHECKOUT_INTEGRATIONS", Role: "consumer"}} + src := `await this.dripService.addToDrip(id); await this.webhookService.triggerWebhook(p);` + got := ResolveConsumerCascade(events, src, nil) + if len(got) != 1 { + t.Fatalf("expected 1 result, got %d", len(got)) + } + if len(got[0].SideEffects) < 2 { + t.Errorf("expected ≥2 side effects, got %d", len(got[0].SideEffects)) + } +} + +func TestResolveConsumerCascade_NilTopicRegistry_StillWorks(t *testing.T) { + events := []EventPatternCall{{Topic: "X", Role: "consumer"}} + src := `this.dripService.addToDrip()` + got := ResolveConsumerCascade(events, src, nil) + if len(got) != 1 { + t.Errorf("expected 1 result even with nil registry, got %d", len(got)) + } +} + +func TestResolveConsumerCascade_MaxSeverityFromAccessGrant(t *testing.T) { + events := []EventPatternCall{{Topic: "X", Role: "consumer"}} + src := `this.accessGrantService.grantAccess(memberId, offerId); this.analyticsService.track('x');` + got := ResolveConsumerCascade(events, src, nil) + if len(got) != 1 || got[0].MaxSeverity != "HIGH" { + t.Errorf("expected MaxSeverity=HIGH, got %+v", got) + } +} + +func TestResolveConsumerCascade_PR10133_CheckoutIntegrationsWorker(t *testing.T) { + src := ` + 
@EventPattern(CheckoutOrchestrationWorkerEvent.CHECKOUT_ORCHESTRATION_INTEGRATIONS) + async handleCheckoutIntegrations(payload) { + await this.dripService.createDrip(payload.contactId, payload.offerId); + await this.externalTriggerService.triggerWebhook(payload); + await this.analyticsService.track('checkout.completed', payload); + } + ` + events := []EventPatternCall{{Topic: "CHECKOUT_INTEGRATIONS", Role: "consumer", Symbol: "CheckoutIntegrationsWorker"}} + got := ResolveConsumerCascade(events, src, nil) + if len(got) == 0 { + t.Fatal("PR #10133: expected cascade results, got none") + } + kinds := make(map[SideEffectKind]bool) + for _, se := range got[0].SideEffects { + kinds[se.Kind] = true + if !se.IsSilent { + t.Errorf("PR #10133: side effect %v should be silent", se.Kind) + } + } + for _, want := range []SideEffectKind{SideEffectDrip, SideEffectWebhook, SideEffectAnalytics} { + if !kinds[want] { + t.Errorf("PR #10133: expected side effect %v in results", want) + } + } + if got[0].UserImpactSummary == "" { + t.Errorf("PR #10133: UserImpactSummary should not be empty") + } +} diff --git a/ghl/internal/enricher/customer_surface.go b/ghl/internal/enricher/customer_surface.go new file mode 100644 index 00000000..0f0e310f --- /dev/null +++ b/ghl/internal/enricher/customer_surface.go @@ -0,0 +1,506 @@ +// Package enricher — customer_surface.go +// +// Composite enricher that fuses ProductMap + Vue metadata + FE fetch calls +// into a single CustomerSurface record. This is the data shape the MCP +// composite tool (`codebase-memory_customer-surface`) returns to downstream +// customer-impact analyzers. +// +// Design: +// - Pure computation, no I/O. Source and ProductMap are passed in. +// MCP tool handlers do the I/O (SQLite lookups, file reads). +// - Graceful degradation: a missing product mapping yields a labelled +// "Unknown — no product mapping" surface rather than an error. Backend- +// only files yield records with empty component fields. 
Empty source +// yields a minimal record with just identity + product. +// - Existing enricher output types (FetchCall, VueComponentMetadata, +// ProductInfo) are reused verbatim — no new struct wrapping them. +// +// Callers (MCP tool handlers) iterate a list of (repo, file, source) tuples +// and collect the []CustomerSurface output. The customer-impact analyzer +// skill then renders the final PR-surface panel from this structured data. + +package enricher + +import ( + "context" + "strings" +) + +// UnknownProductLabel is the sentinel used when no product mapping exists for +// a file. Rendered verbatim in user-facing output so the gap is visible +// (per the "show unknowns explicitly" design principle). +const UnknownProductLabel = "Unknown — no product mapping" + +// BuildCustomerSurfaceArgs are the inputs to BuildCustomerSurface. +type BuildCustomerSurfaceArgs struct { + // Repo is the short repo slug (e.g., "platform-backend", "ghl-crm-frontend"). + // Used for ProductMap lookup. + Repo string + // FilePath is the repo-root-relative file path (no leading slash). + FilePath string + // Source is the full file contents (may be empty for deleted files). + Source string + // ProductMap is the loaded product map. Nil is treated as empty → Unknown. + ProductMap *ProductMap + // MFARegistry is the loaded MFA app registry. Nil disables MFA enrichment. + // When provided, SPMT lookup is done by repo; standalone/SSR lookup is done + // by matching NestJS controller paths against backend_api_prefixes. + MFARegistry *MFARegistry + // TopicRegistry maps pub/sub topic identifiers to downstream customer impact. + // Nil disables event-chain impact enrichment. + TopicRegistry *TopicRegistry + // RouteCallersRegistry maps backend path prefixes to frontend callers/MFA keys. + // Nil disables route-callers enrichment. + RouteCallersRegistry *RouteCallersRegistry + // OrgEnricher performs dynamic org-wide search for callers/subscribers. 
+ // Nil disables dynamic org-wide enrichment (static YAML only). + OrgEnricher *OrgEnricher + // Context for OrgEnricher searches. Required when OrgEnricher is set. + Ctx context.Context +} + +// CustomerSurface is the fused per-file output. +type CustomerSurface struct { + // Identity + Repo string + FilePath string + + // Product area (from ProductMap lookup, or UnknownProductLabel) + Product string + Owner string // empty when Product is Unknown + + // Vue component metadata (zero values for non-Vue files) + ComponentName string + HasScriptSetup bool + HasTemplate bool + ScriptLang string // "ts" | "js" | "" (non-Vue) + + // User-facing strings (from Vue template i18n scan) + I18nKeys []string + + // HTTP call sites (works on Vue, TSX, TS, JS) + FetchCalls []FetchCall + + // NestJS controller routes (populated for *.controller.ts files) + NestJSRoutes []RouteInfo + + // DTO contract fields (populated for *.dto.ts / *.dto.js files) + DTOClasses []DTOMetadata + + // Event patterns this file produces or consumes (populated for backend TS) + EventPatterns []EventPatternCall + + // MFA apps associated with this file — either by repo (for FE files) or + // by backend API prefix match (for controller/service files). + // Empty slice means no match; nil means MFARegistry was not provided. + MFAApps []MFAAppRef + + // SemanticProducts classifies the file by what the code DOES (class names, + // decorators, imports) rather than where it lives (file path). Populated for + // any TypeScript file with non-empty source. Nil when source is empty. + SemanticProducts []SemanticProduct + + // EventChainImpacts lists downstream customer impacts from pub/sub topics + // published by this file. Derived from ExtractPublisherStepTopics + + // TopicRegistry lookup. Nil when TopicRegistry is not provided. + EventChainImpacts []TopicImpact + + // RouteCallers lists which frontend repos and MFA apps call the backend + // routes exposed by this file. 
Derived from NestJS controller prefix + + // routes + RouteCallersRegistry. Nil when RouteCallersRegistry is not provided. + RouteCallers []RouteCallersResult + + // InternalCallImpacts lists downstream service-to-service call impacts + // (e.g. this service calls OFFERS_SERVICE → offers team is impacted). + InternalCallImpacts []InternalCallImpact + + // DTOConsumers lists repos that import the DTO classes defined in this file. + DTOConsumers []DTOConsumerResult + + // MongoReaders lists repos/files that query the same MongoDB collections. + MongoReaders []MongoReaderResult + + // ConsumerCascade describes downstream side effects if this file is a + // consumer worker (emails, drips, webhooks, access grants). + ConsumerCascade []ConsumerCascadeResult + + // EnumDefinitions are enum-like declarations defined in this file. + // Covers TypeScript `enum`, class-static objects, and const-object-as-const + // patterns. Enables cross-repo enum reference tracking that CBM's FTS5 + // index doesn't natively support (dot-notation references tokenize apart). + EnumDefinitions []EnumDefinition + + // EnumReferences are dot-chain references like + // `CheckoutOrchestratorConfig.TOPICS.CHECKOUT_INTEGRATIONS` used in this + // file. One entry per source line. + EnumReferences []EnumReference + + // ImpactReport is the final structured customer-impact summary aggregating + // all signals. Rendered by downstream tooling. + ImpactReport CustomerImpactReport +} + +// BuildCustomerSurface fuses product-area lookup, Vue extraction, FE +// fetch-call extraction, NestJS metadata, DTO extraction, and MFA app +// association into a single record per file. Pure function — +// no file I/O, no network, deterministic given same inputs. +// +// Returns a record (never nil) even when inputs are degenerate (empty source, +// nil ProductMap, etc.). Errors are returned only for unrecoverable conditions; +// the current implementation has none — all partial results degrade +// gracefully. 
+func BuildCustomerSurface(args BuildCustomerSurfaceArgs) (CustomerSurface, error) { + cs := CustomerSurface{ + Repo: args.Repo, + FilePath: args.FilePath, + } + + // 1. Product area lookup (nil ProductMap is tolerated). + if info, found := args.ProductMap.ProductForFile(args.Repo, args.FilePath); found { + cs.Product = info.Product + cs.Owner = info.Owner + } else { + cs.Product = UnknownProductLabel + cs.Owner = "" + } + + // 2. Vue component extraction — only for .vue files AND non-empty source. + // ExtractVueComponent returns an error when the source has neither + //