diff --git a/.bot/skills/video-read/SKILL.md b/.bot/skills/video-read/SKILL.md new file mode 100644 index 00000000..941f0314 --- /dev/null +++ b/.bot/skills/video-read/SKILL.md @@ -0,0 +1,69 @@ +--- +name: video-read +description: Analyze video files (screen recordings, demos, walkthroughs) using AI vision models. Use when you need to understand, describe, or extract information from video content. Supports local files and Slack-hosted videos. +allowed-tools: Bash +model: haiku +--- + +# Video Read + +Analyze video files using Gemini 2.5 Flash (native video understanding) or GPT-4o (frame extraction fallback). + +## Usage + +**Local video file:** + +```bash +bun ../bot/cli.ts video read -f [-m gemini|gpt4o] [-p "custom prompt"] +``` + +**Slack file by ID:** + +```bash +bun ../bot/cli.ts video read --slack-file [-m gemini|gpt4o] [-p "custom prompt"] +``` + +**Slack file by URL:** + +```bash +bun ../bot/cli.ts video read --slack-url "" [-m gemini|gpt4o] [-p "custom prompt"] +``` + +## Parameters + +- `-f, --file`: Local video file path (.mp4, .webm, .mov, .avi, .mkv) +- `--slack-file`: Slack file ID (auto-downloads then analyzes) +- `--slack-url`: Slack file URL (auto-downloads then analyzes) +- `-m, --model`: AI model to use (default: `gemini`) + - `gemini` — Gemini 2.5 Flash with native video understanding (recommended) + - `gpt4o` — GPT-4o with frame extraction via ffmpeg +- `-p, --prompt`: Custom analysis prompt (optional) + +**Note**: Exactly one of `--file`, `--slack-file`, or `--slack-url` must be provided. + +## Examples + +```bash +# Analyze a screen recording with Gemini (default) +bun ../bot/cli.ts video read -f ./recording.mp4 + +# Use GPT-4o fallback +bun ../bot/cli.ts video read -f ./demo.mp4 -m gpt4o + +# Analyze a Slack-shared video by file ID +bun ../bot/cli.ts video read --slack-file F0AMXCK3JQ1 + +# Custom prompt for bug analysis +bun ../bot/cli.ts video read -f ./bug.mp4 -p "What bug is shown? Steps to reproduce?" 
+``` + +## Output + +Detailed text description of the video content, followed by model and usage metadata. + +## Notes + +- Gemini sends full video as base64 — best temporal understanding +- GPT-4o extracts frames at 1fps via ffmpeg, samples every other — requires ffmpeg +- Supported: .mp4, .webm, .mov, .avi, .mkv, .m4v +- Env vars: `GEMINI_API_KEY` for Gemini, `OPENAI_API_KEY` for GPT-4o diff --git a/.claude/skills/video-read/SKILL.md b/.claude/skills/video-read/SKILL.md new file mode 100644 index 00000000..33b1b7f8 --- /dev/null +++ b/.claude/skills/video-read/SKILL.md @@ -0,0 +1,69 @@ +--- +name: video-read +description: Analyze video files (screen recordings, demos, walkthroughs) using AI vision models. Use when the user wants to understand, describe, or extract information from video content. Supports local files and Slack-hosted videos. +allowed-tools: Bash +model: haiku +--- + +# Video Read + +Analyze video files using Gemini 2.5 Flash (native video understanding) or GPT-4o (frame extraction fallback). + +## Usage + +**Local video file:** + +```bash +prbot video read -f [-m gemini|gpt4o] [-p "custom prompt"] +``` + +**Slack file by ID:** + +```bash +prbot video read --slack-file [-m gemini|gpt4o] [-p "custom prompt"] +``` + +**Slack file by URL:** + +```bash +prbot video read --slack-url "" [-m gemini|gpt4o] [-p "custom prompt"] +``` + +## Parameters + +- `-f, --file`: Local video file path (.mp4, .webm, .mov, .avi, .mkv) +- `--slack-file`: Slack file ID (auto-downloads then analyzes) +- `--slack-url`: Slack file URL (auto-downloads then analyzes) +- `-m, --model`: AI model to use (default: `gemini`) + - `gemini` — Gemini 2.5 Flash with native video understanding (recommended) + - `gpt4o` — GPT-4o with frame extraction via ffmpeg +- `-p, --prompt`: Custom analysis prompt (optional, default focuses on step-by-step actions, UI state, errors) + +**Note**: Exactly one of `--file`, `--slack-file`, or `--slack-url` must be provided. 
+ +## Examples + +```bash +# Analyze a screen recording with Gemini (default, best quality) +prbot video read -f ./recording.mp4 + +# Use GPT-4o fallback +prbot video read -f ./demo.mp4 -m gpt4o + +# Analyze a video shared in Slack +prbot video read --slack-file F0AMXCK3JQ1 + +# Custom analysis prompt +prbot video read -f ./bug-repro.mp4 -p "What bug is being demonstrated? What are the exact steps to reproduce it?" +``` + +## Output + +Outputs a detailed text description of the video content, followed by model and usage metadata. + +## Notes + +- Gemini path sends the full video as base64 inline data — best for understanding temporal changes +- GPT-4o path extracts frames at 1fps with ffmpeg, samples every other frame — requires ffmpeg installed +- Supported formats: .mp4, .webm, .mov, .avi, .mkv, .m4v +- Requires `GEMINI_API_KEY` env var for Gemini, `OPENAI_API_KEY` for GPT-4o diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 44909106..39f090f5 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -5,11 +5,15 @@ on: branches: - main jobs: - run_comfy_pr: + comfy_pr_test: runs-on: ubuntu-latest + permissions: + contents: write timeout-minutes: 10 steps: - uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} # setup comfy-cli - uses: actions/setup-python@v5 with: @@ -30,6 +34,18 @@ jobs: # setup comfy-pr # Run Comfy-PR Tests - run: bun i + - run: bunx oxlint --fix + - run: bunx oxfmt --write + - name: Commit lint/format fixes + if: github.event.pull_request.head.repo.full_name == github.repository + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + if ! 
git diff --quiet; then + git add -A + HUSKY=0 git commit -m "style: auto-fix lint and formatting [skip ci]" + git push + fi - run: bun test timeout-minutes: 8 env: diff --git a/.gitignore b/.gitignore index 9bca5af1..c5e087c8 100644 --- a/.gitignore +++ b/.gitignore @@ -79,3 +79,4 @@ tmp/ TODO.md REPORT.md .data +.logs diff --git a/.husky/pre-commit b/.husky/pre-commit index e8f90a56..5b9b4da3 100755 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -1,5 +1,5 @@ #!/usr/bin/env bun -bunx tsgo -bunx oxlint --fix -bunx oxfmt +bun typecheck +bun lint +bunx lint-staged diff --git a/app/tasks/gh-desktop-release-notification/index.spec.ts b/app/tasks/gh-desktop-release-notification/index.spec.ts index 1a7d5754..a4af5842 100644 --- a/app/tasks/gh-desktop-release-notification/index.spec.ts +++ b/app/tasks/gh-desktop-release-notification/index.spec.ts @@ -42,12 +42,14 @@ const createMockCollection = (collectionName?: string) => { } } // Check for deliveryId (webhook tests) - if ((filter as { deliveryId?: string }).deliveryId && d.deliveryId === (filter as { deliveryId?: string }).deliveryId) return doc; + if ( + (filter as { deliveryId?: string }).deliveryId && + d.deliveryId === (filter as { deliveryId?: string }).deliveryId + ) + return doc; } // Fallback to findOneAndUpdate results for backward compatibility - const existingOp = dbOperations.find( - (op) => op.type === "findOneAndUpdate" && op.result, - ); + const existingOp = dbOperations.find((op) => op.type === "findOneAndUpdate" && op.result); if (existingOp && filter.version) { const result = existingOp.result as { coreVersion?: string } | undefined; if (result?.coreVersion === filter.version) { @@ -69,7 +71,7 @@ const createMockCollection = (collectionName?: string) => { }, insertOne: async (doc: unknown) => { const id = `mock_id_${++docIdCounter}`; - const docWithId = { ...doc as object, _id: id }; + const docWithId = { ...(doc as object), _id: id }; docs.set(id, docWithId); return { insertedId: id }; }, @@ -125,6 
+127,14 @@ mock.module("./upsertSlackMessage", () => ({ url: `https://slack.com/message/${Date.now()}`, }; }, + upsertSlackMarkdownMessage: async (msg: SlackMessageType) => { + mockSlackMessages.push(msg); + return { + ...msg, + url: `https://slack.com/message/${Date.now()}`, + }; + }, + mdFmt: async (md: string) => md, })); // Now import the module to test (after all mocks are set up) @@ -174,9 +184,7 @@ describe("GithubDesktopReleaseNotificationTask", () => { expect(saveOps.length).toBeGreaterThanOrEqual(1); // Check if any save operation has slackMessageDrafting - const hasDraftingMessage = saveOps.some( - (op) => op.args[1]?.$set?.slackMessageDrafting, - ); + const hasDraftingMessage = saveOps.some((op) => op.args[1]?.$set?.slackMessageDrafting); expect(hasDraftingMessage).toBe(true); // Ensure slackMessage was NOT set for draft @@ -261,9 +269,7 @@ describe("GithubDesktopReleaseNotificationTask", () => { expect(saveOps.length).toBeGreaterThanOrEqual(1); // Check if any save operation has slackMessage - const hasStableMessage = saveOps.some( - (op) => op.args[1]?.$set?.slackMessage, - ); + const hasStableMessage = saveOps.some((op) => op.args[1]?.$set?.slackMessage); expect(hasStableMessage).toBe(true); }); @@ -343,9 +349,7 @@ describe("GithubDesktopReleaseNotificationTask", () => { expect(saveOps.length).toBeGreaterThanOrEqual(1); // Check if any save operation has slackMessageDrafting - const hasDraftingMessage = saveOps.some( - (op) => op.args[1]?.$set?.slackMessageDrafting, - ); + const hasDraftingMessage = saveOps.some((op) => op.args[1]?.$set?.slackMessageDrafting); expect(hasDraftingMessage).toBe(true); }); }); @@ -372,9 +376,7 @@ describe("GithubDesktopReleaseNotificationTask", () => { // Verify coreVersion was extracted const saveOps = dbOperations.filter((op) => op.type === "findOneAndUpdate"); - const hasCoreVersion = saveOps.some( - (op) => op.args[1]?.$set?.coreVersion === "v0.2.0", - ); + const hasCoreVersion = saveOps.some((op) => 
op.args[1]?.$set?.coreVersion === "v0.2.0"); expect(hasCoreVersion).toBe(true); }); }); diff --git a/app/tasks/gh-frontend-backport-checker/index.spec.ts b/app/tasks/gh-frontend-backport-checker/index.spec.ts index 30ceecaf..d85ffb92 100644 --- a/app/tasks/gh-frontend-backport-checker/index.spec.ts +++ b/app/tasks/gh-frontend-backport-checker/index.spec.ts @@ -1,5 +1,6 @@ import { describe, it, expect } from "bun:test"; import type { BackportStatus } from "./index"; +import { parseMinorVersion, middleTruncated, getBackportStatusEmoji } from "./index"; describe("GithubFrontendBackportCheckerTask", () => { describe("bugfix detection", () => { @@ -173,9 +174,7 @@ describe("GithubFrontendBackportCheckerTask", () => { .filter( (line) => line.startsWith(" ") && - (line.includes("❌") || - line.includes("🔄") || - line.includes("✅")), + (line.includes("❌") || line.includes("🔄") || line.includes("✅")), ) .map((line) => { const trimmed = line.trim(); @@ -255,6 +254,488 @@ describe("GithubFrontendBackportCheckerTask", () => { expect(config.maxReleasesToCheck).toBeGreaterThan(0); }); }); + + describe("parseMinorVersion", () => { + it("should parse minor version from standard semver tags", () => { + expect(parseMinorVersion("v1.38.1")).toBe(38); + expect(parseMinorVersion("v1.0.0")).toBe(0); + expect(parseMinorVersion("v2.5.3")).toBe(5); + expect(parseMinorVersion("v1.100.0")).toBe(100); + }); + + it("should parse tags without v prefix", () => { + expect(parseMinorVersion("1.38.1")).toBe(38); + expect(parseMinorVersion("1.0.0")).toBe(0); + }); + + it("should return null for unparseable tags", () => { + expect(parseMinorVersion("latest")).toBeNull(); + expect(parseMinorVersion("nightly")).toBeNull(); + expect(parseMinorVersion("")).toBeNull(); + }); + }); + + describe("version-based release filtering", () => { + it("should include releases within maxMinorVersionsBehind of latest", () => { + const maxMinorVersionsBehind = 4; + const latestMinor = 40; + const releases = [ + { tag: 
"v1.40.0", minor: 40 }, + { tag: "v1.39.2", minor: 39 }, + { tag: "v1.38.1", minor: 38 }, + { tag: "v1.37.0", minor: 37 }, + { tag: "v1.36.0", minor: 36 }, // exactly 4 behind, should be included (<=) + { tag: "v1.35.0", minor: 35 }, // 5 behind, should be excluded + ]; + + const included = releases.filter((r) => latestMinor - r.minor <= maxMinorVersionsBehind); + expect(included.map((r) => r.tag)).toEqual([ + "v1.40.0", + "v1.39.2", + "v1.38.1", + "v1.37.0", + "v1.36.0", + ]); + }); + }); + + describe("backport-not-needed labels", () => { + const backportNotNeededLabel = "no-backport-needed"; + const backportNotNeededLabels: Record = { + core: "no-backport-needed-core", + cloud: "no-backport-needed-cloud", + }; + + function hasBackportNotNeededLabel(labels: string[], targetPrefix: string): boolean { + if (labels.some((l) => l.toLowerCase() === backportNotNeededLabel.toLowerCase())) return true; + const notNeededLabel = backportNotNeededLabels[targetPrefix]; + if (!notNeededLabel) return false; + return labels.some((l) => l.toLowerCase() === notNeededLabel.toLowerCase()); + } + + it("should detect no-backport-needed-core label", () => { + const labels = ["bug", "no-backport-needed-core", "core/1.4"]; + expect(hasBackportNotNeededLabel(labels, "core")).toBe(true); + expect(hasBackportNotNeededLabel(labels, "cloud")).toBe(false); + }); + + it("should detect no-backport-needed-cloud label", () => { + const labels = ["bug", "no-backport-needed-cloud", "cloud/1.36"]; + expect(hasBackportNotNeededLabel(labels, "cloud")).toBe(true); + expect(hasBackportNotNeededLabel(labels, "core")).toBe(false); + }); + + it("should detect general no-backport-needed label for all targets", () => { + const labels = ["bug", "no-backport-needed"]; + expect(hasBackportNotNeededLabel(labels, "core")).toBe(true); + expect(hasBackportNotNeededLabel(labels, "cloud")).toBe(true); + }); + + it("should be case insensitive", () => { + const labels = ["No-Backport-Needed-Core"]; + 
expect(hasBackportNotNeededLabel(labels, "core")).toBe(true); + }); + + it("should return false when no matching label", () => { + const labels = ["bug", "needs-backport"]; + expect(hasBackportNotNeededLabel(labels, "core")).toBe(false); + expect(hasBackportNotNeededLabel(labels, "cloud")).toBe(false); + }); + + it("should return false for unknown target prefix without general label", () => { + const labels = ["no-backport-needed-core"]; + expect(hasBackportNotNeededLabel(labels, "unknown")).toBe(false); + }); + }); + + describe("release sheriff parsing", () => { + it("should parse Slack user ID from channel description", () => { + const text = "Current Release Sheriff: <@U12345678>"; + const match = text.match(/Release Sheriff:?\s*<@(\w+)>/i); + expect(match?.[1]).toBe("U12345678"); + }); + + it("should handle description without sheriff", () => { + const text = "Frontend releases channel"; + const match = text.match(/Release Sheriff:?\s*<@(\w+)>/i); + expect(match).toBeNull(); + }); + + it("should handle various formatting", () => { + const formats = [ + "Release Sheriff: <@U999>", + "Current Release Sheriff: <@UABC123>", + "release sheriff <@U111>", + ]; + for (const text of formats) { + const match = text.match(/Release Sheriff:?\s*<@(\w+)>/i); + expect(match?.[1]).toBeTruthy(); + } + }); + }); + + describe("middleTruncated", () => { + it("should return string as-is when within maxLength", () => { + expect(middleTruncated(20, "short string")).toBe("short string"); + }); + + it("should return string as-is when exactly maxLength", () => { + expect(middleTruncated(5, "abcde")).toBe("abcde"); + }); + + it("should truncate middle of long strings", () => { + const result = middleTruncated(11, "abcdefghijklmnop"); + expect(result).toHaveLength(11); + expect(result).toContain("..."); + expect(result.startsWith("abcd")).toBe(true); + expect(result.endsWith("mnop")).toBe(true); + }); + + it("should handle empty string", () => { + expect(middleTruncated(10, "")).toBe(""); 
+ }); + }); + + describe("getBackportStatusEmoji", () => { + it("should return Slack emoji for completed", () => { + expect(getBackportStatusEmoji("completed")).toBe(":pr-merged:"); + }); + + it("should return Slack emoji for in-progress", () => { + expect(getBackportStatusEmoji("in-progress")).toBe(":pr-open:"); + }); + + it("should return exclamation for needed", () => { + expect(getBackportStatusEmoji("needed")).toContain("Need backport"); + }); + + it("should return dash for not-needed", () => { + expect(getBackportStatusEmoji("not-needed")).toBe("➖"); + }); + + it("should return spaces for unknown", () => { + expect(getBackportStatusEmoji("unknown")).toBe(" "); + }); + }); + + describe("already-backported commit filtering", () => { + const backportFilter = /\[backport .*?\]/i; + + it("should detect [backport ...] commits", () => { + expect(backportFilter.test("[backport core/1.4] fix: auth bug")).toBe(true); + expect(backportFilter.test("[Backport cloud/1.36] fix: render issue")).toBe(true); + expect(backportFilter.test("[BACKPORT stable] hotfix: crash")).toBe(true); + }); + + it("should not filter normal bugfix commits", () => { + expect(backportFilter.test("fix: authentication bug")).toBe(false); + expect(backportFilter.test("hotfix: resolve crash")).toBe(false); + }); + + it("should handle backport with various content inside brackets", () => { + expect(backportFilter.test("[backport core/1.4, cloud/1.36] fix: bug")).toBe(true); + expect(backportFilter.test("[backport v2] patch: security fix")).toBe(true); + }); + + it("should not match empty backport brackets", () => { + expect(backportFilter.test("[backport] fix: something")).toBe(false); + }); + }); + + describe("bot comment filtering", () => { + const botPattern = /\bbot$|\[bot\]$/; + + it("should detect bot usernames", () => { + expect(botPattern.test("dependabot")).toBe(false); // no word boundary between "a" and "b" in "dependabot", so \bbot$ does not match + 
expect(botPattern.test("github-actions[bot]")).toBe(true); + expect(botPattern.test("renovate[bot]")).toBe(true); + expect(botPattern.test("comfy-bot")).toBe(true); + }); + + it("should not filter human usernames", () => { + expect(botPattern.test("john")).toBe(false); + expect(botPattern.test("robotics-engineer")).toBe(false); + expect(botPattern.test("bottleneck")).toBe(false); + }); + + it("should match usernames ending in 'bot' as a word boundary", () => { + expect(botPattern.test("some-bot")).toBe(true); + expect(botPattern.test("mybot")).toBe(false); // no word boundary before "bot" + }); + }); + + describe("overall backport status derivation", () => { + function deriveOverallStatus( + backportTargetStatus: Array<{ status: BackportStatus }>, + ): BackportStatus { + const activeTargets = backportTargetStatus.filter((t) => t.status !== "not-needed"); + return activeTargets.length && activeTargets.every((t) => t.status === "completed") + ? "completed" + : activeTargets.some((t) => t.status === "in-progress") + ? "in-progress" + : activeTargets.some((t) => t.status === "needed") + ? "needed" + : backportTargetStatus.length && !activeTargets.length + ? 
"not-needed" + : "unknown"; + } + + it("should return completed when all active targets are completed", () => { + expect(deriveOverallStatus([{ status: "completed" }, { status: "completed" }])).toBe( + "completed", + ); + }); + + it("should return in-progress when any target is in-progress", () => { + expect(deriveOverallStatus([{ status: "completed" }, { status: "in-progress" }])).toBe( + "in-progress", + ); + }); + + it("should return needed when any target is needed", () => { + expect(deriveOverallStatus([{ status: "completed" }, { status: "needed" }])).toBe("needed"); + }); + + it("should return not-needed when all targets are not-needed", () => { + expect(deriveOverallStatus([{ status: "not-needed" }, { status: "not-needed" }])).toBe( + "not-needed", + ); + }); + + it("should return unknown when no targets exist", () => { + expect(deriveOverallStatus([])).toBe("unknown"); + }); + + it("should ignore not-needed targets in priority calculation", () => { + expect(deriveOverallStatus([{ status: "not-needed" }, { status: "completed" }])).toBe( + "completed", + ); + }); + + it("should prioritize in-progress over needed", () => { + expect(deriveOverallStatus([{ status: "needed" }, { status: "in-progress" }])).toBe( + "in-progress", + ); + }); + + it("should return completed when only active target is completed alongside not-needed", () => { + expect( + deriveOverallStatus([ + { status: "not-needed" }, + { status: "completed" }, + { status: "not-needed" }, + ]), + ).toBe("completed"); + }); + }); + + describe("compare link regex parsing", () => { + const compareRegex = + /github\.com\/(?[^/]+)\/(?[^/]+)\/compare\/(?\S+)\.\.\.(?\S+)/; + + it("should parse standard compare links", () => { + const url = "https://github.com/Comfy-Org/ComfyUI_frontend/compare/v1.38.0...v1.38.1"; + const groups = url.match(compareRegex)?.groups; + expect(groups?.owner).toBe("Comfy-Org"); + expect(groups?.repo).toBe("ComfyUI_frontend"); + expect(groups?.base).toBe("v1.38.0"); + 
expect(groups?.head).toBe("v1.38.1"); + }); + + it("should parse compare links with branch names", () => { + const url = "https://github.com/Comfy-Org/ComfyUI_frontend/compare/main...feature/branch"; + const groups = url.match(compareRegex)?.groups; + expect(groups?.owner).toBe("Comfy-Org"); + expect(groups?.base).toBe("main"); + expect(groups?.head).toBe("feature/branch"); + }); + + it("should fail on invalid compare links", () => { + const url = "https://github.com/Comfy-Org/ComfyUI_frontend/pulls"; + expect(url.match(compareRegex)).toBeNull(); + }); + }); + + describe("backport branch name generation", () => { + function generateBackportBranch(prNumber: number, branchName: string): string { + return `backport-${prNumber}-to-${branchName.replaceAll("/", "-")}`; + } + + it("should generate correct branch name with slash replacement", () => { + expect(generateBackportBranch(123, "core/1.4")).toBe("backport-123-to-core-1.4"); + }); + + it("should handle multiple slashes", () => { + expect(generateBackportBranch(456, "cloud/1.36")).toBe("backport-456-to-cloud-1.36"); + }); + + it("should handle branch names without slashes", () => { + expect(generateBackportBranch(789, "stable")).toBe("backport-789-to-stable"); + }); + }); + + describe("processSince date filtering", () => { + const processSince = new Date("2026-01-06T00:00:00Z"); + + it("should include releases after processSince", () => { + const releaseDate = new Date("2026-02-01T00:00:00Z"); + expect(+releaseDate >= +processSince).toBe(true); + }); + + it("should include releases exactly at processSince", () => { + const releaseDate = new Date("2026-01-06T00:00:00Z"); + expect(+releaseDate >= +processSince).toBe(true); + }); + + it("should exclude releases before processSince", () => { + const releaseDate = new Date("2025-12-31T23:59:59Z"); + expect(+releaseDate >= +processSince).toBe(false); + }); + }); + + describe("targetBranches guard (backportStatusRaw === needed)", () => { + const reBackportTargets = 
/^(core|cloud)\/1\..*$/; + + function getTargetBranches(labels: string[], backportStatusRaw: BackportStatus): string[] { + return labels + .filter((l) => reBackportTargets.test(l)) + .filter((_e) => backportStatusRaw === "needed"); + } + + it("should return target branches when status is needed", () => { + const labels = ["core/1.4", "cloud/1.36", "bug"]; + expect(getTargetBranches(labels, "needed")).toEqual(["core/1.4", "cloud/1.36"]); + }); + + it("should return empty when status is unknown", () => { + const labels = ["core/1.4", "cloud/1.36"]; + expect(getTargetBranches(labels, "unknown")).toEqual([]); + }); + + it("should return empty when status is completed", () => { + const labels = ["core/1.4"]; + expect(getTargetBranches(labels, "completed")).toEqual([]); + }); + + it("should return empty when status is in-progress", () => { + const labels = ["core/1.4"]; + expect(getTargetBranches(labels, "in-progress")).toEqual([]); + }); + + it("should return empty when no matching labels even if needed", () => { + const labels = ["bug", "needs-backport"]; + expect(getTargetBranches(labels, "needed")).toEqual([]); + }); + }); + + describe("idempotent Slack update check", () => { + it("should detect when report text has changed", () => { + const newReport = "**Release v1.40.0 Backport Status:**\nnew content"; + const existingText = "**Release v1.40.0 Backport Status:**\nold content"; + expect(newReport.trim() !== existingText.trim()).toBe(true); + }); + + it("should skip update when text is identical", () => { + const text = "**Release v1.40.0 Backport Status:**\nsame content"; + expect(text.trim() !== text.trim()).toBe(false); + }); + + it("should skip update when only whitespace differs at edges", () => { + const newReport = " report content \n"; + const existingText = "report content"; + expect(newReport.trim() !== existingText.trim()).toBe(false); + }); + }); + + describe("author tag resolution filtering", () => { + it("should only resolve tags for non-completed, 
non-not-needed statuses", () => { + const bugfixCommits = [ + { prAuthor: "alice", backportStatus: "needed" as BackportStatus }, + { prAuthor: "bob", backportStatus: "completed" as BackportStatus }, + { prAuthor: "charlie", backportStatus: "not-needed" as BackportStatus }, + { prAuthor: "dave", backportStatus: "in-progress" as BackportStatus }, + { prAuthor: "eve", backportStatus: "unknown" as BackportStatus }, + ]; + + const authorsToResolve = bugfixCommits + .filter( + (bf) => + bf.prAuthor && bf.backportStatus !== "completed" && bf.backportStatus !== "not-needed", + ) + .map((bf) => bf.prAuthor); + + expect(authorsToResolve).toEqual(["alice", "dave", "eve"]); + expect(authorsToResolve).not.toContain("bob"); + expect(authorsToResolve).not.toContain("charlie"); + }); + + it("should deduplicate authors", () => { + const bugfixCommits = [ + { prAuthor: "alice", backportStatus: "needed" as BackportStatus }, + { prAuthor: "alice", backportStatus: "needed" as BackportStatus }, + ]; + + const seen = new Set(); + const authorsToResolve = bugfixCommits + .filter((bf) => { + if (!bf.prAuthor || seen.has(bf.prAuthor)) return false; + if (bf.backportStatus === "completed" || bf.backportStatus === "not-needed") return false; + seen.add(bf.prAuthor); + return true; + }) + .map((bf) => bf.prAuthor); + + expect(authorsToResolve).toEqual(["alice"]); + }); + }); + + describe("report status categorization", () => { + it("should categorize as not-mentioned when no backportTargetStatus", () => { + const commit = { backportTargetStatus: [] as { status: BackportStatus }[] }; + const status = !commit.backportTargetStatus.length + ? "not-mentioned" + : commit.backportTargetStatus.some( + (t) => t.status !== "completed" && t.status !== "not-needed", + ) + ? 
"in-progress" + : "completed"; + expect(status).toBe("not-mentioned"); + }); + + it("should categorize as in-progress when any target is not completed/not-needed", () => { + const commit = { + backportTargetStatus: [ + { status: "completed" as BackportStatus }, + { status: "needed" as BackportStatus }, + ], + }; + const status = !commit.backportTargetStatus.length + ? "not-mentioned" + : commit.backportTargetStatus.some( + (t) => t.status !== "completed" && t.status !== "not-needed", + ) + ? "in-progress" + : "completed"; + expect(status).toBe("in-progress"); + }); + + it("should categorize as completed when all targets are completed or not-needed", () => { + const commit = { + backportTargetStatus: [ + { status: "completed" as BackportStatus }, + { status: "not-needed" as BackportStatus }, + ], + }; + const status = !commit.backportTargetStatus.length + ? "not-mentioned" + : commit.backportTargetStatus.some( + (t) => t.status !== "completed" && t.status !== "not-needed", + ) + ? "in-progress" + : "completed"; + expect(status).toBe("completed"); + }); + }); }); // Helper functions for testing diff --git a/app/tasks/gh-frontend-backport-checker/index.ts b/app/tasks/gh-frontend-backport-checker/index.ts index b165eafd..ffd48b8a 100644 --- a/app/tasks/gh-frontend-backport-checker/index.ts +++ b/app/tasks/gh-frontend-backport-checker/index.ts @@ -11,24 +11,168 @@ import { logger } from "@/src/logger"; import prettier from "prettier"; import { ghPageFlow } from "@/src/ghPageFlow"; import { match as tsmatch } from "ts-pattern"; +import { getChannelInfo } from "@/lib/slack/channel-info"; +import { getSlackChannel } from "@/lib/slack/channels"; +import { slackCached } from "@/lib"; /** * GitHub Frontend Backport Checker Task * - * Workflow: - * 1. Monitor ComfyUI_frontend recent N releases - * 2. Identify bugfix commits (keywords: fix, bugfix, hotfix, patch, bug) - * 3. For each bugfix, find the associated PR - * 4. 
Check PR labels for backport indicators (core/1.**, cloud/1.**) - * 5. Check PR comments for backport mentions - * 6. Track status and send Slack summary (to channel #frontend-releases) + * Automatically monitors ComfyUI_frontend releases for bugfix commits that may + * need backporting to stable branches (core/1.**, cloud/1.**), then posts a + * per-release status report to Slack (#frontend-releases). + * + * ── How it works ────────────────────────────────────────────────────────────── + * + * 1. DISCOVER BACKPORT TARGET BRANCHES + * Lists all repo branches matching `core/1.**` or `cloud/1.**` via the + * GitHub API. These are the branches that bugfixes may need cherry-picking to. + * + * 2. FETCH RECENT RELEASES + * Fetches up to `maxReleasesToCheck` (10) releases from ComfyUI_frontend. + * Filters them by: + * - `processSince` date (skip very old releases) + * - `maxMinorVersionsBehind` (4) — only show releases whose minor version + * is at most 4 behind the latest (e.g. if latest is v1.40, v1.36 is + * included but v1.35 is not) + * + * 3. EXTRACT COMPARE LINK FROM RELEASE BODY + * Each release body contains a GitHub compare URL (e.g. + * `.../compare/v1.38.0...v1.38.1`). This is used to get the list of commits + * included in that release. + * + * 4. IDENTIFY BUGFIX COMMITS + * From the compare diff, filters commits whose first line matches bugfix + * keywords: fix, bugfix, hotfix, patch, bug (case-insensitive). + * Excludes commits already tagged as `[backport ...]` (already cherry-picked). + * + * 5. RESOLVE ASSOCIATED PR FOR EACH BUGFIX COMMIT + * Uses `listPullRequestsAssociatedWithCommit` to find the PR that introduced + * each bugfix commit. + * + * 6. DETERMINE BACKPORT STATUS FOR EACH PR + * For each bugfix PR, checks: + * + * a) PR LABELS — filters labels matching `reBackportTargets` regex + * (e.g. `core/1.4`, `cloud/1.36`). 
If any such labels exist, derives + * `backportStatusRaw` from them: checks for "completed"/"in-progress"/ + * "needs" substrings, but in practice these branch-style labels always + * fall through to the default → "needed". + * + * b) PR BODY & COMMENTS — scans for mentions of "backport" or "stable" + * (excluding bot comments). If found, marks as "needed". + * + * c) PER-TARGET-BRANCH STATUS — only runs when `backportStatusRaw` is + * "needed". For each labeled target branch: + * - Checks for `no-backport-needed[-core|-cloud]` labels + * → marks that target as "not-needed" + * - Uses `compareCommits(target_branch, commit_sha)` to check if the + * commit already exists on that branch: + * • "identical"/"behind" → "completed" (commit is already there) + * • "ahead" → "needed" (commit is missing) + * • "diverged" → searches for a backport PR with the + * naming convention `backport-{prNumber}-to-{branch}`: + * - If a merged backport PR exists → "completed" + * - If an open backport PR exists → "in-progress" + * - Otherwise → "needed" + * + * d) OVERALL STATUS — derived from per-target statuses (ignoring not-needed): + * - All completed → "completed" + * - Any in-progress → "in-progress" + * - Any needed → "needed" + * - All not-needed → "not-needed" + * - Otherwise → "unknown" + * + * 7. GENERATE REPORT & POST TO SLACK + * Builds a markdown report per release showing each bugfix and its backport + * status across targets. For PRs needing backport, resolves the PR author's + * Slack user ID (by matching GitHub username → Slack display name) and tags + * them. Falls back to tagging the "Release Sheriff" (parsed from the + * #frontend-releases channel topic/purpose). + * + * The report is upserted (created or updated) as a Slack message via + * `upsertSlackMarkdownMessage`, so re-runs update existing messages rather + * than creating duplicates. + * + * 8. 
PERSISTENCE + * All state is stored in MongoDB collection `GithubFrontendBackportCheckerTask`, + * keyed by `releaseUrl`. This allows incremental re-checks and preserves + * Slack message references for updates. + * + * ── Edge Cases & Special Handling ────────────────────────────────────────────── + * + * • UNPARSEABLE VERSION TAGS — if `parseMinorVersion` returns null (e.g. tag + * "nightly" or "latest"), the release is included rather than excluded, so + * non-semver releases are never silently skipped. + * + * • MISSING COMPARE LINK — if the release body does not contain a + * `.../compare/...` URL, the task throws via `DIE()`. This means releases + * without a proper changelog are treated as errors rather than silently + * ignored. + * + * • COMPARE API FAILURE — if `compareCommits` fails for a release (e.g. tags + * deleted, repo renamed), the release is saved with `taskStatus: "failed"` + * and processing continues to the next release. + * + * • ALREADY-BACKPORTED COMMITS — commits whose first line matches + * `[backport ...]` (case-insensitive) are filtered out, preventing double- + * counting of cherry-pick commits that landed in the same release. + * + * • NO ASSOCIATED PR — if `listPullRequestsAssociatedWithCommit` returns + * an empty array, the commit produces no bugfix entries (the `.map().flat()` + * over PRs yields nothing). Direct pushes without a PR are silently skipped. + * + * • BOT COMMENTS — when scanning PR comments for backport mentions, comments + * from bots (username ending in `bot` or `[bot]`) are excluded to avoid + * false positives from automated messages. + * + * • BACKPORT-NOT-NEEDED LABELS — `no-backport-needed` dismisses all targets; + * per-target labels `no-backport-needed-core` / `no-backport-needed-cloud` + * override individual branch status to "not-needed", even if the commit + * hasn't been cherry-picked. When ALL targets are dismissed, the overall + * status becomes "not-needed". 
+ * + * • DIVERGED BRANCH (backport PR detection) — when the target branch has + * diverged from the commit (common for long-lived stable branches), the + * checker searches for a PR with branch name `backport-{prNumber}-to-{branch}` + * and additionally filters by `head.ref` to avoid false matches from + * similarly-named branches. Checks all states (open, closed, merged). + * + * • SLACK USER RESOLUTION — attempts to match GitHub username to a Slack user + * by comparing against `name`, `display_name`, and `real_name` (with spaces + * stripped, case-insensitive). If no match is found, falls back to tagging + * the Release Sheriff (parsed from #frontend-releases channel topic/purpose + * via regex `Release Sheriff:? <@UXXXXXX>`). If neither resolves, no one is + * tagged. + * + * • DRY RUN MODE — when `--dry-run` is passed, Slack tags show raw GitHub + * usernames (e.g. `@octocat`) instead of making Slack API calls, and no + * Slack messages are sent/updated. + * + * • IDEMPOTENT SLACK UPDATES — the report is only sent/updated when the + * formatted text differs from the previously stored `slackMessage.text`. + * Re-runs with no status changes produce no Slack API calls. + * + * • NO BUGFIX COMMITS — if a release has zero bugfix commits after filtering, + * the task is saved as `taskStatus: "completed"` with an empty array and no + * Slack message is sent. + * + * • CI MODE — when running in CI (`is-ci` package), the database connection + * is closed and the process exits after one run instead of staying alive + * for hot-reload. + * + * ── Running ─────────────────────────────────────────────────────────────────── + * + * bun app/tasks/gh-frontend-backport-checker/index.ts # normal + * bun app/tasks/gh-frontend-backport-checker/index.ts --dry-run # no Slack * */ const config = { // 1. 
monitor releases from this repo repo: "https://github.com/Comfy-Org/ComfyUI_frontend", - maxReleasesToCheck: 3, + maxReleasesToCheck: 10, // fetch more releases, then filter by version distance + maxMinorVersionsBehind: 4, // stop showing backport warnings after this many minor versions behind latest processSince: new Date("2026-01-06T00:00:00Z").toISOString(), // only process releases since this date, to avoid posting too msgs in old releases // 2. identify bugfix commits @@ -40,6 +184,13 @@ const config = { // 3. backport labels on PRs backportLabels: ["needs-backport"], + // labels that dismiss backport requirements per target (or all targets) + backportNotNeededLabel: "no-backport-needed", + backportNotNeededLabels: { + core: "no-backport-needed-core", + cloud: "no-backport-needed-cloud", + } as Record, + // 5. detect backport mentions reBackportMentionPatterns: /\b(backports?|stable)\b/i, @@ -64,6 +215,7 @@ export type GithubFrontendBackportCheckerTask = { prNumber?: number; prTitle?: string; prLabels?: string[]; + prAuthor?: string; // GitHub username of PR author backportStatus: BackportStatus; // overall status, derived from backportTargetStatus, calculated by backport targets (core/1.**, cloud/1.**) backportStatusRaw: BackportStatus; // raw status from bugfix PR analysis, before checking backport targets status @@ -105,9 +257,12 @@ const save = async (task: { releaseUrl: string } & Partial parseMinorVersion(r.tag_name)) + .filter((v): v is number => v !== null) + .reduce((a, b) => Math.max(a, b), 0); + logger.info( + `Latest minor version: ${latestMinor}, will show releases within ${config.maxMinorVersionsBehind} minor versions`, + ); + // Process each release const processedReleases = await sflow(releases) .filter((release) => +new Date(release.created_at) >= +new Date(config.processSince)) + // Filter by version distance: show releases up to and including maxMinorVersionsBehind behind latest + .filter((release) => { + const minor = 
parseMinorVersion(release.tag_name); + if (minor === null) return true; // can't parse, include it + return latestMinor - minor <= config.maxMinorVersionsBehind; + }) .map(async function convertReleaseToTask(release) { const compareLink = ( @@ -175,7 +345,7 @@ export default async function runGithubFrontendBackportCheckerTask() { ); } -function getBackportStatusEmoji(status: BackportStatus): string { +export function getBackportStatusEmoji(status: BackportStatus): string { switch (status) { case "completed": return ":pr-merged:"; @@ -192,12 +362,118 @@ function getBackportStatusEmoji(status: BackportStatus): string { } } -function middleTruncated(maxLength: number, str: string): string { +export function middleTruncated(maxLength: number, str: string): string { if (str.length <= maxLength) return str; const half = Math.floor((maxLength - 3) / 2); return `${str.slice(0, half)}...${str.slice(-half)}`; } +/** Parse semver minor version from a release tag like "v1.38.1" → 38 */ +export function parseMinorVersion(tag: string): number | null { + const match = tag.match(/v?\d+\.(\d+)/); + return match ? parseInt(match[1], 10) : null; +} + +/** + * Get the current release sheriff's Slack user ID from the #frontend-releases channel description. + * Expects format: "Current Release Sheriff: <@U12345678>" in channel purpose or topic. 
+ */ +export async function getReleaseSheriffUserId(): Promise { + try { + const channel = await getSlackChannel(config.slackChannelName); + if (!channel?.id) return null; + const info = await getChannelInfo(channel.id as string); + // Slack stores mentions as <@U12345678> in description + const text = `${info.purpose?.value || ""} ${info.topic?.value || ""}`; + const match = text.match(/Release Sheriff:?\s*<@(\w+)>/i); + return match?.[1] || null; + } catch (e) { + logger.warn("Failed to get release sheriff from channel description", { error: e }); + return null; + } +} + +/** Cached promise for all Slack workspace members — fetched once per run. */ +let slackMembersCache: Promise< + NonNullable>["members"]> +> | null = null; + +async function getAllSlackMembers() { + if (!slackMembersCache) { + slackMembersCache = (async () => { + const firstPage = await slackCached.users.list({ limit: 500 }); + const members = [...(firstPage.members || [])]; + let cursor = firstPage.response_metadata?.next_cursor || undefined; + while (cursor) { + const page = await slackCached.users.list({ limit: 500, cursor }); + members.push(...(page.members || [])); + cursor = page.response_metadata?.next_cursor || undefined; + } + return members; + })(); + } + return slackMembersCache; +} + +/** + * Try to find a Slack user ID for a GitHub username. + * Matches against Slack display_name, name, and real_name (case-insensitive). + * Returns null if no match found. 
+ */ +export async function findSlackUserIdByGithubUsername( + githubUsername: string, +): Promise { + try { + const members = await getAllSlackMembers(); + const lowerGh = githubUsername.toLowerCase(); + const found = members.find((m) => { + if (m.deleted || m.is_bot) return false; + const profile = m.profile as Record | undefined; + return ( + m.name?.toLowerCase() === lowerGh || + (profile?.display_name as string)?.toLowerCase() === lowerGh || + ((profile?.real_name as string | undefined) || "") + .toLowerCase() + .replace(/\s+/g, "") + .includes(lowerGh) + ); + }); + return (found?.id as string) || null; + } catch (e) { + logger.warn("Failed to look up Slack user for GitHub username", { + githubUsername, + error: e, + }); + return null; + } +} + +/** + * Resolve who to tag in Slack for a backport notification. + * Tries the PR author first, falls back to release sheriff. + */ +async function resolveSlackTagForAuthor(githubUsername?: string): Promise { + if (githubUsername) { + const slackUserId = await findSlackUserIdByGithubUsername(githubUsername); + if (slackUserId) return `<@${slackUserId}>`; + } + // Fall back to release sheriff + const sheriffId = await getReleaseSheriffUserId(); + if (sheriffId) return `<@${sheriffId}>`; + return ""; // no one to tag +} + +/** Check if a PR has a backport-not-needed label for a given target prefix (e.g. 
"core" or "cloud") */ +function hasBackportNotNeededLabel(labels: string[], targetPrefix: string): boolean { + // General label dismisses all targets + if (labels.some((l) => l.toLowerCase() === config.backportNotNeededLabel.toLowerCase())) + return true; + // Per-target label + const notNeededLabel = config.backportNotNeededLabels[targetPrefix]; + if (!notNeededLabel) return false; + return labels.some((l) => l.toLowerCase() === notNeededLabel.toLowerCase()); +} + async function processTask( task: GithubFrontendBackportCheckerTask, ): Promise { @@ -209,10 +485,17 @@ async function processTask( /github\.com\/(?[^/]+)\/(?[^/]+)\/compare\/(?\S+)\.\.\.(?\S+)/, )?.groups || DIE(`Failed to parse compare link: ${compareLink}`); logger.debug(` Comparing to head: ${head}`); - // const compareApiUrl = compareLink - const compareResult = await ghc.repos - .compareCommits({ owner, repo, base, head }) - .then((e) => e.data.commits); + let compareResult; + try { + compareResult = await ghc.repos + .compareCommits({ owner, repo, base, head }) + .then((e) => e.data.commits); + } catch (e) { + logger.warn(` Failed to compare ${base}...${head}, skipping release ${task.releaseTag}`, { + error: e, + }); + return await save({ ...task, bugfixCommits: [], taskStatus: "failed" }); + } logger.debug(` Found ${compareResult.length} commits in release`); // // collect already backported commits, for logging purpose @@ -259,7 +542,9 @@ async function processTask( logger.debug(` Processing PR #${prNumber}: ${prTitle}`); // Check labels - const labels = pr.labels.map((l) => (typeof l === "string" ? l : l.name)); + const labels = pr.labels + .map((l) => (typeof l === "string" ? 
l : l.name)) + .filter((l): l is string => !!l); const backportLabels = labels.filter((l) => config.reBackportTargets.test(l)); // Check PR body and comments for backport mentions @@ -308,19 +593,32 @@ async function processTask( )})`, ); // check each backport target branch status - const backportTargetStatus = await sflow( - labels.filter((l) => config.reBackportTargets.test(l)), - ) - // only when this pr is needed to backport - .filter((_e) => backportStatusRaw === "needed") - // only when this branch is in PR labels - .filter((branchName) => - labels.some((l) => l.toLowerCase().includes(branchName.toLowerCase())), - ) - // now check if the commit is in the branch + const targetBranches = labels + .filter((l) => config.reBackportTargets.test(l)) + .filter((_e) => backportStatusRaw === "needed"); + + const backportTargetStatus = await sflow(targetBranches) .map(async (branchName) => { - // let status: BackportStatus = "unknown"; - // check if the commit is in the branch + // Check for no-backport-needed[-core|-cloud] labels first (e.g. 
"core/1.4" → prefix "core") + const targetPrefix = branchName.split("/")[0]; + if (hasBackportNotNeededLabel(labels, targetPrefix)) { + logger.debug( + ` Backport target branch ${branchName} marked not-needed by label`, + ); + return { + branch: branchName, + status: "not-needed" as BackportStatus, + prs: [] as { + prUrl?: string; + prNumber?: number; + prTitle?: string; + prStatus?: "open" | "closed" | "merged"; + lastCheckedAt?: Date; + }[], + }; + } + + // now check if the commit is in the branch const comparing = await ghc.repos .compareCommits({ owner, @@ -330,51 +628,39 @@ async function processTask( }) .then((e) => e.data); let PRs: { - prUrl?: string; // if backport PR exists + prUrl?: string; prNumber?: number; prTitle?: string; prStatus?: "open" | "closed" | "merged"; lastCheckedAt?: Date; }[] = []; const status: BackportStatus = await tsmatch(comparing.status) - .with("ahead", () => "needed" as const) // not yet backported - .with("identical", () => "completed" as const) // completed, because fix commit is already in the target branch - .with("behind", () => "completed" as const) // completed, because fix commit is already in the target branch - - // diverged means we need to determine backport PR status: - // when PR not exists, we need to backport - // when PR exists, merged: already backport - // when PR exists, open: in progress + .with("ahead", () => "needed" as const) + .with("identical", () => "completed" as const) + .with("behind", () => "completed" as const) .with("diverged", async () => { - // search the backport pr and check its status - // e.g. 
backport-7974-to-cloud-1.36, this branch name is autogenerated by ci const backportBranch = `backport-${prNumber}-to-${branchName.replaceAll("/", "-")}`; const backportPRs = await ghPageFlow(ghc.pulls.list)({ owner, repo, head: backportBranch, base: branchName, - state: "all", // include closed/merged prs + state: "all", }) - .filter((e) => e.head.ref === backportBranch) // hack bug: github api seems also returns other prs + .filter((e) => e.head.ref === backportBranch) .toArray(); - PRs = backportPRs.map((pr) => ({ - prUrl: pr.html_url, - prNumber: pr.number, - prTitle: pr.title, - prStatus: pr.merged_at ? "merged" : pr.state === "open" ? "open" : "closed", + PRs = backportPRs.map((bpr) => ({ + prUrl: bpr.html_url, + prNumber: bpr.number, + prTitle: bpr.title, + prStatus: bpr.merged_at ? "merged" : bpr.state === "open" ? "open" : "closed", lastCheckedAt: new Date(), })); - // if pr is merged - if (backportPRs.some((e) => e.merged_at)) return "completed" as const; // some of backport prs are merged - if (backportPRs.some((e) => e.state.toUpperCase() === "OPEN")) { - return "in-progress" as const; // some of backport prs are open - } - // backportPRs[0].closed_at - // if pr is merged - // return comparing.status; + if (backportPRs.some((e) => e.merged_at)) return "completed" as const; + if (backportPRs.some((e) => e.state.toUpperCase() === "OPEN")) + return "in-progress" as const; return "needed" as const; }) .otherwise(() => { @@ -384,26 +670,24 @@ async function processTask( return "unknown" as const; }); - // if (isInBranch) { - // status = "completed"; - // } else {j - // status = "needed"; - // } logger.debug(` Backport target branch ${branchName} status: ${status}`); return { branch: branchName, status, prs: PRs }; }) .toArray(); + // Determine overall backport status (ignoring "not-needed" targets) + const activeTargets = backportTargetStatus.filter((t) => t.status !== "not-needed"); const backportStatus: BackportStatus = - backportTargetStatus.length && - 
backportTargetStatus.every((t) => t.status === "completed") + activeTargets.length && activeTargets.every((t) => t.status === "completed") ? "completed" - : backportTargetStatus.some((t) => t.status === "in-progress") + : activeTargets.some((t) => t.status === "in-progress") ? "in-progress" - : backportTargetStatus.some((t) => t.status === "needed") + : activeTargets.some((t) => t.status === "needed") ? "needed" - : "unknown"; - // Save to database + : backportTargetStatus.length && !activeTargets.length + ? "not-needed" // all targets have backport-not-needed labels + : "unknown"; + return { commitSha, commitMessage, @@ -411,6 +695,7 @@ async function processTask( prNumber, prTitle, prLabels: labels, + prAuthor: pr.user?.login, backportStatus, backportStatusRaw, @@ -425,18 +710,31 @@ async function processTask( .toArray(); if (!bugfixCommits.length) { - // no need to report return await save({ ...task, bugfixCommits, taskStatus: "completed" }); } + // Resolve Slack tags for authors who have unresolved backports + const authorTags = new Map(); + for (const bf of bugfixCommits) { + if ( + bf.prAuthor && + !authorTags.has(bf.prAuthor) && + bf.backportStatus !== "completed" && + bf.backportStatus !== "not-needed" + ) { + authorTags.set(bf.prAuthor, await resolveSlackTagForAuthor(bf.prAuthor)); + } + } + const statuses = bugfixCommits.map((e) => ({ ...e, status: !e.backportTargetStatus.length ? ("not-mentioned" as const) - : e.backportTargetStatus.some((e) => e.status !== "completed") + : e.backportTargetStatus.some((t) => t.status !== "completed" && t.status !== "not-needed") ? ("in-progress" as const) : ("completed" as const), })); + // - generate report based on commits, note: slack's markdown not support table const rawReport = `**Release [${task.releaseTag}](${task.releaseUrl}) Backport Status:${ statuses.filter((e) => e.status !== "completed").length ? 
"" : " Completed" @@ -446,67 +744,40 @@ ${ // not mentioned, show might need statuses .filter((e) => !e.backportTargetStatus.length) - .map( - (bf) => `[${middleTruncated(60, bf.commitMessage)}](${bf.prUrl}) ➡️ _❗ Might need backport_`, - ) - .join("\n") -} -${ - // in-progress, show detailed status - bugfixCommits - .filter( - (e) => - e.backportTargetStatus?.length && - e.backportTargetStatus.some((e) => e.status !== "completed"), - ) .map((bf) => { - const targetsStatuses = bf.backportTargetStatus - .map((ts) => { - const prStatus = ts.prs - .map((pr) => - pr.prUrl ? `[:pr-${pr.prStatus?.toLowerCase()}: #${pr.prNumber}](${pr.prUrl})` : "", - ) - .filter(Boolean) - .join(", "); - // show pr status if exists - return `${ts.branch}: ${prStatus || getBackportStatusEmoji(ts.status)}`; - }) - .join(", "); - return `[${middleTruncated(60, bf.commitMessage)}](${bf.prUrl}) ➡️ ${targetsStatuses}`; + const tag = bf.prAuthor ? authorTags.get(bf.prAuthor) || "" : ""; + return `[${middleTruncated(60, bf.commitMessage)}](${bf.prUrl}) ➡️ _❗ Might need backport_ ${tag}`.trim(); }) .join("\n") } - ${ - // finished, show pr numbers inline and + // in-progress/needed, show detailed status with author tags bugfixCommits .filter( (e) => e.backportTargetStatus?.length && - e.backportTargetStatus.every((e) => e.status === "completed"), + e.backportTargetStatus.some((t) => t.status !== "completed" && t.status !== "not-needed"), ) - .filter((_e) => false) - // .filter((e) => false) // show nothing for now .map((bf) => { const targetsStatuses = bf.backportTargetStatus .map((ts) => { + if (ts.status === "not-needed") return `${ts.branch}: ➖`; const prStatus = ts.prs .map((pr) => pr.prUrl ? 
`[:pr-${pr.prStatus?.toLowerCase()}: #${pr.prNumber}](${pr.prUrl})` : "", ) .filter(Boolean) .join(", "); - // show pr status if exists return `${ts.branch}: ${prStatus || getBackportStatusEmoji(ts.status)}`; }) .join(", "); - return `[#${bf.prNumber}](${bf.prUrl}) ➡️ ${targetsStatuses}`; + const tag = bf.prAuthor ? authorTags.get(bf.prAuthor) || "" : ""; + return `[${middleTruncated(60, bf.commitMessage)}](${bf.prUrl}) ➡️ ${targetsStatuses} ${tag}`.trim(); }) - .join(", ") + .join("\n") } `; - // _by [backport-checker](https://github.com/Comfy-Org/Comfy-PR/tree/HEAD/app/tasks/gh-frontend-backport-checker/index.ts)_ const formattedReport = await prettier.format(rawReport, { parser: "markdown" }); logger.info(formattedReport); @@ -514,19 +785,22 @@ ${ task = await save({ ...task, bugfixCommits }); // - now lets upsert slack message - - process.env.DRY_RUN = ""; - - if (formattedReport.trim() !== task.slackMessage?.text?.trim()) { - const msg = await upsertSlackMarkdownMessage({ - channelName: config.slackChannelName, - markdown: formattedReport, - url: task.slackMessage?.url, - }); - task = await save({ - ...task, - slackMessage: { text: msg.text, channel: msg.channel, url: msg.url }, - }); + if (isDryRun) { + logger.info("DRY RUN: Would send/update Slack message to #" + config.slackChannelName); + } else { + process.env.DRY_RUN = ""; + + if (formattedReport.trim() !== task.slackMessage?.text?.trim()) { + const msg = await upsertSlackMarkdownMessage({ + channelName: config.slackChannelName, + markdown: formattedReport, + url: task.slackMessage?.url, + }); + task = await save({ + ...task, + slackMessage: { text: msg.text, channel: msg.channel, url: msg.url }, + }); + } } return { ...task, diff --git a/app/tasks/gh-issue-transfer-comfyui-to-frontend/index.spec.ts b/app/tasks/gh-issue-transfer-comfyui-to-frontend/index.spec.ts index bb75a92f..763e5236 100644 --- a/app/tasks/gh-issue-transfer-comfyui-to-frontend/index.spec.ts +++ 
b/app/tasks/gh-issue-transfer-comfyui-to-frontend/index.spec.ts @@ -21,7 +21,9 @@ const createMockCollection = (collectionName?: string) => { const f = filter as Record; // Check operations first for backward compatibility const op = dbOperations.find( - (op) => (op as { filter?: { sourceIssueNumber?: number } }).filter?.sourceIssueNumber === f?.sourceIssueNumber, + (op) => + (op as { filter?: { sourceIssueNumber?: number } }).filter?.sourceIssueNumber === + f?.sourceIssueNumber, ); if (op) return (op as { data?: unknown }).data || null; // Check in-memory docs @@ -33,7 +35,7 @@ const createMockCollection = (collectionName?: string) => { return null; }, findOneAndUpdate: async (filter: unknown, update: unknown) => { - const data = { ...filter as object, ...(update as { $set?: object }).$set }; + const data = { ...(filter as object), ...(update as { $set?: object }).$set }; dbOperations.push({ filter, data }); return data; }, @@ -45,7 +47,7 @@ const createMockCollection = (collectionName?: string) => { }, insertOne: async (doc: unknown) => { const id = `mock_id_${++docIdCounter}`; - const docWithId = { ...doc as object, _id: id }; + const docWithId = { ...(doc as object), _id: id }; docs.set(id, docWithId); return { insertedId: id }; }, diff --git a/app/tasks/gh-issue-transfer-comfyui-to-workflow_templates/index.spec.ts b/app/tasks/gh-issue-transfer-comfyui-to-workflow_templates/index.spec.ts index cb679169..0937d777 100644 --- a/app/tasks/gh-issue-transfer-comfyui-to-workflow_templates/index.spec.ts +++ b/app/tasks/gh-issue-transfer-comfyui-to-workflow_templates/index.spec.ts @@ -1,5 +1,10 @@ import { server } from "@/src/test/msw-setup"; -import { createMockDb, getMockDbDocuments, insertMockDbDocument, resetMockDb } from "@/src/test/mockDb"; +import { + createMockDb, + getMockDbDocuments, + insertMockDbDocument, + resetMockDb, +} from "@/src/test/mockDb"; import { afterEach, beforeEach, describe, expect, it } from "bun:test"; import { http, HttpResponse } from 
"msw"; diff --git a/app/tasks/gh-issue-transfer-frontend-to-comfyui/index.spec.ts b/app/tasks/gh-issue-transfer-frontend-to-comfyui/index.spec.ts index ccb321f2..34831a89 100644 --- a/app/tasks/gh-issue-transfer-frontend-to-comfyui/index.spec.ts +++ b/app/tasks/gh-issue-transfer-frontend-to-comfyui/index.spec.ts @@ -107,17 +107,14 @@ describe("GithubFrontendToComfyuiIssueTransferTask", () => { }, ), // Mock creating issue in target repo - http.post( - "https://api.github.com/repos/Comfy-Org/ComfyUI/issues", - async ({ request }) => { - createdIssue = await request.json(); - return HttpResponse.json({ - number: 456, - html_url: "https://github.com/Comfy-Org/ComfyUI/issues/456", - ...createdIssue, - }); - }, - ), + http.post("https://api.github.com/repos/Comfy-Org/ComfyUI/issues", async ({ request }) => { + createdIssue = await request.json(); + return HttpResponse.json({ + number: 456, + html_url: "https://github.com/Comfy-Org/ComfyUI/issues/456", + ...createdIssue, + }); + }), // Mock creating comment on source issue http.post( "https://api.github.com/repos/Comfy-Org/ComfyUI_frontend/issues/123/comments", @@ -379,18 +376,15 @@ describe("GithubFrontendToComfyuiIssueTransferTask", () => { return HttpResponse.json([]); }, ), - http.post( - "https://api.github.com/repos/Comfy-Org/ComfyUI/issues", - async ({ request }) => { - const body: unknown = await request.json(); - issuesCreated++; - const issueNumber = parseInt(body.title.split(" ")[1]); - return HttpResponse.json({ - number: issueNumber + 10000, - html_url: `https://github.com/Comfy-Org/ComfyUI/issues/${issueNumber + 10000}`, - }); - }, - ), + http.post("https://api.github.com/repos/Comfy-Org/ComfyUI/issues", async ({ request }) => { + const body: unknown = await request.json(); + issuesCreated++; + const issueNumber = parseInt(body.title.split(" ")[1]); + return HttpResponse.json({ + number: issueNumber + 10000, + html_url: `https://github.com/Comfy-Org/ComfyUI/issues/${issueNumber + 10000}`, + }); + }), 
http.post( "https://api.github.com/repos/Comfy-Org/ComfyUI_frontend/issues/:issue_number/comments", () => { diff --git a/app/tasks/gh-priority-sync/index.spec.ts b/app/tasks/gh-priority-sync/index.spec.ts index cc5f45c8..595ee000 100644 --- a/app/tasks/gh-priority-sync/index.spec.ts +++ b/app/tasks/gh-priority-sync/index.spec.ts @@ -26,7 +26,15 @@ mock.module("@/src/parseIssueUrl", () => ({ issue_number: parseInt(match[3]), }; }, - stringifyIssueUrl: ({ owner, repo, issue_number }: { owner: string; repo: string; issue_number: number }) => { + stringifyIssueUrl: ({ + owner, + repo, + issue_number, + }: { + owner: string; + repo: string; + issue_number: number; + }) => { return `https://github.com/${owner}/${repo}/issues/${issue_number}`; }, })); diff --git a/bot/cli.ts b/bot/cli.ts index 095143a0..4cb7c6dd 100755 --- a/bot/cli.ts +++ b/bot/cli.ts @@ -39,6 +39,9 @@ import yaml from "yaml"; // Notion ability import { searchNotion } from "@/lib/notion/search"; +// Video ability +import { readVideo } from "@/lib/video/read-video"; + /** * Load environment variables from .env.local in the project root * This allows prbot to work from unknown directory @@ -75,8 +78,7 @@ async function handlePrCommand(args: { const { repo, base = "main", head, prompt } = args; // Import here to avoid circular dependencies - // @ts-ignore - z-chat-completion may not have type declarations - const zChatCompletion = (await import("z-chat-completion")).default; + const zChatCompletion = (await import("../lib/zChat")).default; const z = (await import("zod")).default; let finalHead = head; @@ -922,6 +924,100 @@ async function main() { } }, ) + .command("video", "Video analysis commands", (yargs) => { + return yargs + .command( + "read", + "Analyze a video file using AI vision (Gemini or GPT-4o)", + (y) => + y + .option("file", { + alias: "f", + type: "string", + describe: "Local video file path", + }) + .option("slack-file", { + type: "string", + describe: "Slack file ID to download and 
analyze", + }) + .option("slack-url", { + type: "string", + describe: "Slack file URL to download and analyze", + }) + .option("model", { + alias: "m", + type: "string", + default: "gemini", + describe: "Model to use: gemini or gpt4o", + }) + .option("prompt", { + alias: "p", + type: "string", + describe: "Custom analysis prompt", + }) + .check((argv) => { + const sources = [argv.file, argv["slack-file"], argv["slack-url"]].filter(Boolean); + if (sources.length === 0) { + throw new Error("One of --file, --slack-file, or --slack-url is required"); + } + if (sources.length > 1) { + throw new Error( + "Only one of --file, --slack-file, or --slack-url can be specified", + ); + } + if (!["gemini", "gpt4o"].includes(argv.model as string)) { + throw new Error("Model must be 'gemini' or 'gpt4o'"); + } + return true; + }), + async (args) => { + await loadEnvLocal(); + + let videoPath = args.file as string | undefined; + + // Download from Slack if needed + if (args["slack-file"] || args["slack-url"]) { + let fileId = args["slack-file"] as string | undefined; + + if (args["slack-url"]) { + const parsed = parseSlackUrlSmart(args["slack-url"] as string); + if (!parsed.fileId) { + console.error("Could not extract file ID from Slack URL"); + process.exit(1); + } + fileId = parsed.fileId; + } + + // Download to temp path + const fileInfo = await getSlackFileInfo(fileId!); + const fileName = fileInfo.name || `video-${fileId}`; + const tmpPath = `/tmp/${fileName}`; + console.log(`Downloading Slack file ${fileId} → ${tmpPath}`); + await downloadSlackFile(fileId!, tmpPath); + videoPath = tmpPath; + } + + console.log(`Analyzing video: ${videoPath}`); + console.log(`Model: ${args.model}`); + if (args.prompt) + console.log(`Custom prompt: ${(args.prompt as string).substring(0, 80)}...`); + console.log("---"); + + const result = await readVideo(videoPath!, { + model: args.model as "gemini" | "gpt4o", + prompt: args.prompt as string | undefined, + }); + + console.log(result.description); 
+ console.log("\n---"); + console.log(`Model: ${result.model}`); + console.log(`Usage: ${JSON.stringify(result.usage)}`); + if (result.mdPath) console.log(`Report saved: ${result.mdPath}`); + }, + ) + .demandCommand(1, "Please specify a video subcommand") + .help(); + }) .command("agent", "Agent control commands", (yargs) => { return yargs.command( "respond-slack-msg ", diff --git a/bot/code/prbot.ts b/bot/code/prbot.ts index 166d8739..a50427ce 100755 --- a/bot/code/prbot.ts +++ b/bot/code/prbot.ts @@ -1,8 +1,7 @@ #!/usr/bin/env bun import minimist from "minimist"; import { spawnSubAgent } from "./pr-agent"; -// @ts-ignore -import zChatCompletion from "z-chat-completion"; +import zChatCompletion from "../../lib/zChat"; import z from "zod"; const CC_TYPES = "feat|fix|build|chore|ci|docs|style|refactor|perf|test|revert"; diff --git a/bot/slack-bolt.ts b/bot/slack-bolt.ts index 6ca7da02..6b307530 100644 --- a/bot/slack-bolt.ts +++ b/bot/slack-bolt.ts @@ -11,12 +11,7 @@ import { slack, slackCached } from "@/lib"; import winston from "winston"; import { parseSlackMessageToMarkdown } from "@/lib/slack/parseSlackMessageToMarkdown"; import sflow from "sflow"; -import { - streamText, - tool, - type ModelMessage, - type ToolSet, -} from "ai"; +import { streamText, tool, type ModelMessage, type ToolSet } from "ai"; import { openai } from "@ai-sdk/openai"; import { tsmatch } from "@/packages/mongodb-pipeline-ts/Task"; // tap and type imports removed (unused) diff --git a/bot/slack-bot.ts b/bot/slack-bot.ts index 14b09d26..7971eda2 100644 --- a/bot/slack-bot.ts +++ b/bot/slack-bot.ts @@ -17,8 +17,7 @@ import { fromStdio } from "from-node-stream"; import { mkdir } from "fs/promises"; import sflow from "sflow"; import winston from "winston"; -// @ts-ignore -import zChatCompletion from "z-chat-completion"; +import zChatCompletion from "../lib/zChat"; import z from "zod"; import { IdleWaiter } from "./IdleWaiter"; import { RestartManager } from "./RestartManager"; @@ -517,17 
+516,37 @@ async function spawnBotOnSlackMessageEvent(event: z.infer 0 && { - attachments: m.attachments.map((a: unknown) => { - const attachment = a as z.infer; - return { - title: attachment.title, - title_link: attachment.title_link, - text: attachment.text, - fallback: attachment.fallback, - image_url: attachment.image_url, - from_url: attachment.from_url, - }; - }), + attachments: await Promise.all( + m.attachments.map(async (a: unknown) => { + const attachment = a as z.infer; + // Parse from_url to extract channel name + let from_channel: string | undefined; + if (attachment.from_url) { + try { + const parsed = slackMessageUrlParse(attachment.from_url); + const channelInfo = await slack.conversations.info({ channel: parsed.channel }); + from_channel = channelInfo.channel?.name + ? `#${channelInfo.channel.name}` + : undefined; + } catch { + // Ignore parsing errors + } + } + return { + title: attachment.title, + title_link: attachment.title_link, + text: attachment.text + ? await parseSlackMessageToMarkdown(attachment.text) + : undefined, + fallback: attachment.fallback + ? 
await parseSlackMessageToMarkdown(attachment.fallback) + : undefined, + image_url: attachment.image_url, + from_url: attachment.from_url, + from_channel, // Add resolved channel name + }; + }), + ), }), ...(m.reactions && m.reactions.length > 0 && { @@ -584,7 +603,10 @@ The user sent a new message in a Slack thread where I am already assisting them ${event.text} The thread's recent messages are: -${tap((data) => logger.debug("Thread messages:", { data }))( +${((data: string) => { + logger.debug("Thread messages:", { data }); + return data; +})( yaml.stringify( nearbyMessages.toSorted(compareBy((e) => +(e.ts || 0))), // sort by ts asc ), @@ -1191,11 +1213,19 @@ IMPORTANT WORKSPACE CONVENTIONS: `New stable output detected, length: ${newStable.length}, news length: ${news.length}`, ); - const my_internal_thoughts = tr.render().split("\n").slice(-80).join("\n"); + const rawTerminalOutput = tr.render().split("\n").slice(-80).join("\n"); + const my_internal_thoughts = cleanTerminalOutput(rawTerminalOutput); // const my_internal_thoughts = tr.tail(80); + logger.debug( + "Raw terminal output (before cleaning): " + + yaml.stringify({ preview: rawTerminalOutput.slice(0, 200) }), + ); logger.info( - "Unsent preview: " + - yaml.stringify({ preview: news.slice(0, 200), my_internal_thoughts }), + "Cleaned output preview: " + + yaml.stringify({ + preview: my_internal_thoughts.slice(0, 200), + news_preview: news.slice(0, 200), + }), ); // send update to slack @@ -1217,24 +1247,38 @@ RULES: - If my_internal_thoughts contains new information, append it to the relevant sections in my_response_md_original. - If my_internal_thoughts indicates completion of a task, add a "Tasks" section at the end of my_response_md_original with - [x] mark. - Ensure my_response_md_updated is clear and concise. -- Use **bold** to highlight unknown new sections or important updates. and remove previeous highlighted sections if not important anymore. 
-If all infomations from my_internal_thoughts are already contained in my_response_md_original, you can feel free to return {my_response_md_updated: "__NOTHING_CHANGED__"} - -- IMPORTANT NOTES: - -- KEEP message very short and informative, use url links to reference documents/repos instead of pasting large contents. -- Response Message should be short and in up to 16 lines, the agent will post long report by .md files. -- Focus on end-user's question or intent's helpful contents -- DO NOT INCLUDE ANY internal-only or debugging contexts, system info, local paths, etc IN my_response_md_updated. -- my_internal_thoughts may contain terminal control characters and environment system info, ignore them and only focus on the end-user-helpful content. -- YOU CAN ONLY change/remove/add up to 1 line! -- Describe what you are currently doing in up to 7 words! less is better. -- Don't show unknown ERRORs to user, they will be recorded into ERROR logs and solve by bot-developers anyway. -- DONT ASK ME ANY QUESTIONS IN YOUR RESPONSE. JUST FIND NECESSARY INFORMATION BY YOUR SELF AND SHOW YOUR BEST UNDERSTANDING. -- Output the my_response_md_updated in standard markdown format (github favored). -- LENGTH LIMIT: my_response_md_updated must be within 4000 characters. SYSTEM WILL TRUNCATE IF EXCEEDING THIS LIMIT. - -- MOST_IMPORTANT: Keep the my_response_md_original's context and formatting and contents as much as possible, only update a few lines that need to be updated based on my_internal_thoughts. +- Use **bold** to highlight new sections or important updates. Remove previously highlighted sections if they're no longer relevant. 
+- If all information from my_internal_thoughts is already contained in my_response_md_original, return: {my_response_md_updated: "__NOTHING_CHANGED__"} + +CRITICAL FILTERING RULES (Non-negotiable): +- KEEP ONLY: User-facing progress, task completion status, findings relevant to user's intent, next steps +- REMOVE: File paths, system info, debug output, error stack traces, internal process details, development notes +- EXAMPLES OF WHAT TO REMOVE: + - "/bot/slack/channel-id/timestamp" (internal paths) + - "undefined/null received in chunk" (internal errors) + - "DEBUG: ..." (debug output) + - "✓ Created /tmp/cache/..." (internal file operations) + - "[2026-02-20T15:10:40.123Z]" (timestamps) + +TONE & LENGTH: +- KEEP message very short and informative, use url links to reference documents/repos instead of pasting large contents +- Response should be up to 16 lines maximum (agent posts long reports as .md files) +- Focus ONLY on end-user's question or intent's helpful contents +- Describe current progress in up to 7 words (less is better) +- Avoid jargon; write for non-technical users when possible + +FORMAT REQUIREMENTS: +- Output in standard markdown format (GitHub flavored) +- YOU CAN ONLY change/remove/add up to 1 line per update! 
+- LENGTH LIMIT: Must be within 4000 characters (system will truncate if exceeding) +- MOST IMPORTANT: Keep my_response_md_original's context and formatting mostly unchanged, only update necessary lines + +DO NOT: +- Ask the user questions +- Include error details (they're logged separately for developers) +- Show code blocks or technical configs +- Show internal process logs or environment variables +- Show any paths starting with "/" or "./" - Here's Contexts in YAML for your respondse: @@ -1243,6 +1287,21 @@ ${yaml.stringify(contexts)} `) as { my_response_md_updated: string }; + + // Log raw my_response_md_updated to JSONL file for debugging + const responseLogEntry = { + timestamp: new Date().toISOString(), + workspaceId, + stage: "raw_from_claude", + my_response_md_updated_raw: updateResponseResp.my_response_md_updated, + my_internal_thoughts_preview: my_internal_thoughts.slice(0, 500), + my_response_md_original: quickRespondMsg.text || "", + }; + await appendFile( + ".logs/my_response_md_updated.jsonl", + JSON.stringify(responseLogEntry) + "\n", + ).catch(() => {}); + const updated_response_full = await mdFmt( updateResponseResp.my_response_md_updated .trim() @@ -1257,6 +1316,20 @@ ${yaml.stringify(contexts)} updated_response_full.slice(-2000) : updated_response_full; + // Log final processed my_response_md_updated + const finalLogEntry = { + timestamp: new Date().toISOString(), + workspaceId, + stage: "final_processed", + my_response_md_updated_final: my_response_md_updated, + was_truncated: updated_response_full.length > 4000, + original_length: updated_response_full.length, + }; + await appendFile( + ".logs/my_response_md_updated.jsonl", + JSON.stringify(finalLogEntry) + "\n", + ).catch(() => {}); + if (quickRespondMsg.ts && quickRespondMsg.channel) { await safeSlackUpdateMessage(slack, { channel: quickRespondMsg.channel, @@ -1417,6 +1490,65 @@ function commonPrefix(...args: string[]): string { } return prefix; } + +/** + * Clean terminal output by 
removing ANSI codes, debug info, and system paths + * This ensures Claude only sees user-meaningful progress information + */ +function cleanTerminalOutput(text: string): string { + // Remove ANSI color codes and escape sequences + text = text.replace(/\x1b\[[0-9;]*m/g, ""); + text = text.replace(/\x1b\[[^m]*m/g, ""); + text = text.replace(/\u0007/g, ""); // Bell character + text = text.replace(/\r/g, ""); // Carriage returns + + // Remove box drawing characters (Claude Code banner) + text = text.replace(/[▐▛▜▘▝█▌▙▟▞▚░▒▓│┃├┤┬┴┼─═║╔╗╚╝╠╣╦╩╬]/g, ""); + + // Filter lines to remove debug noise + const lines = text.split("\n").filter((line) => { + const trimmed = line.trim(); + + // Skip empty or whitespace-only lines + if (!trimmed) return true; + + // Skip timestamp-prefixed log lines (multiple formats) + // Format 1: [2026-02-20T15:10:40.123Z] + if (/^\[[\d\-T:.Z]+\]/.test(trimmed)) return false; + // Format 2: 2026-02-20 15:42:09 [info]: + if (/^\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2}\s+\[/.test(trimmed)) return false; + + // Skip warning lines + if (/^⚠|^Warning:|^WARN:|^\[warn\]/i.test(trimmed)) return false; + + // Skip debug/verbose/trace/info prefixed lines + if (/^(DEBUG|VERBOSE|TRACE|INFO):/i.test(trimmed)) return false; + if (/\[(debug|verbose|trace|info)\]:/i.test(trimmed)) return false; + + // Skip claude-yes specific output + if (/\[claude-yes\]|claude-yes|Spawned claude|PID \d+/i.test(trimmed)) return false; + if (/Claude Code v\d|Opus \d|Claude Max/i.test(trimmed)) return false; + + // Skip lines containing system paths anywhere + if (/\/bot\/slack\/|\/codes\/|\.logs\/|\/repos\/|\/tmp\//i.test(trimmed)) return false; + + // Skip undefined/null error indicators + if (/received undefined\/null|undefined\/null/i.test(trimmed)) return false; + + // Skip deprecation warnings + if (/deprecated|--exit-on-idle|-e are deprecated/i.test(trimmed)) return false; + + // Skip pure terminal control output or lines that are mostly special chars + if 
(/^(\s*|cursor\s+|bell|bel|\x07)$/i.test(trimmed)) return false; + + // Skip lines that are mostly whitespace or contain only special characters + if (/^[\s\u2000-\u206F\u2500-\u257F]*$/.test(trimmed)) return false; + + return true; + }); + + return lines.join("\n").trim(); +} function sanitized(name: string) { return name.replace(/[^a-zA-Z0-9-_]/g, "_").slice(0, 50); } diff --git a/bot/utils/filterDebugMessages.ts b/bot/utils/filterDebugMessages.ts index 01f12206..52c938c4 100644 --- a/bot/utils/filterDebugMessages.ts +++ b/bot/utils/filterDebugMessages.ts @@ -34,7 +34,6 @@ const DEBUG_PATTERNS = [ // ANSI escape codes (terminal colors/formatting) /\x1b\[[0-9;]*[mGKHf]/g, - /\033\[[0-9;]*[mGKHf]/g, // Terminal control characters /[\x00-\x08\x0B-\x0C\x0E-\x1F\x7F]/g, diff --git a/bun.lock b/bun.lock index 358182b4..57122797 100644 --- a/bun.lock +++ b/bun.lock @@ -9,6 +9,7 @@ "@ai-sdk/openai": "^3.0.19", "@auth/mongodb-adapter": "^3.4.2", "@ctrl/mac-address": "^3.0.3", + "@google/generative-ai": "^0.24.1", "@hookform/resolvers": "^5.1.1", "@kbox-labs/react-echarts": "^1.2.0", "@keyv/mongo": "^3.0.5", @@ -373,6 +374,8 @@ "@gar/promisify": ["@gar/promisify@1.1.3", "", {}, "sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw=="], + "@google/generative-ai": ["@google/generative-ai@0.24.1", "", {}, "sha512-MqO+MLfM6kjxcKoy0p1wRzG3b4ZZXtPI+z2IE26UogS2Cm/XHO+7gGRBh6gcJsOiIVoH93UwKvW4HdgiOZCy9Q=="], + "@hapi/bourne": ["@hapi/bourne@3.0.0", "", {}, "sha512-Waj1cwPXJDucOib4a3bAISsKJVb15MKi9IvmTI/7ssVEm6sywXGjVJDhl6/umt1pK1ZS7PacXU3A1PmFKHEZ2w=="], "@hookform/resolvers": ["@hookform/resolvers@5.2.2", "", { "dependencies": { "@standard-schema/utils": "^0.3.0" }, "peerDependencies": { "react-hook-form": "^7.55.0" } }, "sha512-A/IxlMLShx3KjV/HeTcTfaMxdwy690+L/ZADoeaTltLx+CVuzkeVIPuybK3jrRfw7YZnmdKsVVHAlEPIAEUNlA=="], @@ -1969,7 +1972,7 @@ "inline-style-parser": ["inline-style-parser@0.2.4", "", {}, 
"sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q=="], - "inliner": ["inliner@github:reimertz/inliner#a1219ee", { "dependencies": { "ansi-escapes": "^1.4.0", "ansi-styles": "^2.2.1", "chalk": "^1.1.3", "charset": "^1.0.0", "cheerio": "^0.19.0", "debug": "^2.2.0", "es6-promise": "^2.3.0", "iconv-lite": "^0.4.11", "jschardet": "^1.3.0", "lodash.assign": "^3.2.0", "lodash.defaults": "^3.1.2", "lodash.foreach": "^3.0.3", "mime": "^1.3.4", "minimist": "^1.1.3", "request": "^2.74.0", "svgo": "^0.6.6", "then-fs": "^2.0.0", "uglify-js": "^2.6.2", "update-notifier": "^0.5.0" }, "bin": { "inliner": "cli/index.js" } }, "reimertz-inliner-a1219ee"], + "inliner": ["inliner@github:reimertz/inliner#a1219ee", { "dependencies": { "ansi-escapes": "^1.4.0", "ansi-styles": "^2.2.1", "chalk": "^1.1.3", "charset": "^1.0.0", "cheerio": "^0.19.0", "debug": "^2.2.0", "es6-promise": "^2.3.0", "iconv-lite": "^0.4.11", "jschardet": "^1.3.0", "lodash.assign": "^3.2.0", "lodash.defaults": "^3.1.2", "lodash.foreach": "^3.0.3", "mime": "^1.3.4", "minimist": "^1.1.3", "request": "^2.74.0", "svgo": "^0.6.6", "then-fs": "^2.0.0", "uglify-js": "^2.6.2", "update-notifier": "^0.5.0" }, "bin": { "inliner": "cli/index.js" } }, "reimertz-inliner-a1219ee", "sha512-eA2JHSdj4paZRyZ1rumVAsuxjll9i8z2VVpdBO2Vdm13lWrjlBTCYtm7OIQVbOdeoQInMiH+WjcIA0vTTjKKjA=="], "internal-slot": ["internal-slot@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "hasown": "^2.0.2", "side-channel": "^1.1.0" } }, "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw=="], diff --git a/lib/slack/msg-read-nearby.ts b/lib/slack/msg-read-nearby.ts index f88d4f41..b9465624 100644 --- a/lib/slack/msg-read-nearby.ts +++ b/lib/slack/msg-read-nearby.ts @@ -144,11 +144,14 @@ export async function readNearbyMessages( ...(m.files && m.files.length > 0 && { files: m.files.map((f) => ({ + id: f.id, name: f.name, title: f.title, mimetype: f.mimetype, + filetype: 
f.filetype, size: f.size, url_private: f.url_private, + url_private_download: f.url_private_download, permalink: f.permalink, })), }), diff --git a/lib/slack/msg-read-recent.ts b/lib/slack/msg-read-recent.ts index f1ae0883..a5dfff36 100644 --- a/lib/slack/msg-read-recent.ts +++ b/lib/slack/msg-read-recent.ts @@ -91,8 +91,10 @@ export async function readRecentMessages(channel: string, limit: number = 10) { name: f.name, title: f.title, mimetype: f.mimetype, + filetype: f.filetype, size: f.size, url_private: f.url_private, + url_private_download: f.url_private_download, permalink: f.permalink, })), }), diff --git a/lib/slack/msg-read-thread.ts b/lib/slack/msg-read-thread.ts index dc53eab9..b72238f8 100644 --- a/lib/slack/msg-read-thread.ts +++ b/lib/slack/msg-read-thread.ts @@ -82,11 +82,14 @@ export async function readSlackThread(channel: string, ts: string, limit: number ...(m.files && m.files.length > 0 && { files: m.files.map((f) => ({ + id: f.id, name: f.name, title: f.title, mimetype: f.mimetype, + filetype: f.filetype, size: f.size, url_private: f.url_private, + url_private_download: f.url_private_download, permalink: f.permalink, })), }), diff --git a/lib/slack/slackTsToISO.ts b/lib/slack/slackTsToISO.ts index 4c7d64b8..41313a07 100644 --- a/lib/slack/slackTsToISO.ts +++ b/lib/slack/slackTsToISO.ts @@ -5,7 +5,7 @@ */ export function slackTsToISO(ts: string): string { const [seconds, microseconds] = ts.split("."); - const milliseconds = parseInt(seconds) * 1000 + parseInt(microseconds.slice(0, 3)); + const milliseconds = parseInt(seconds) * 1000 + parseInt((microseconds || "000").slice(0, 3)); return new Date(milliseconds).toISOString(); } diff --git a/lib/video/read-video.ts b/lib/video/read-video.ts new file mode 100644 index 00000000..3f013867 --- /dev/null +++ b/lib/video/read-video.ts @@ -0,0 +1,227 @@ +#!/usr/bin/env bun + +import { GoogleGenerativeAI } from "@google/generative-ai"; +import OpenAI from "openai"; +import fs from "fs"; +import path from 
"path"; +import { parseArgs } from "util"; + +const DEFAULT_PROMPT = `Analyze this video recording in detail. Describe: +1. Step-by-step actions taken (in chronological order) +2. Any UI elements, buttons, menus, or dialogs shown +3. Temporal changes — what changed from start to end +4. Any errors, warnings, or unexpected behavior visible +5. The overall workflow or task being performed + +Be specific about timestamps and transitions.`; + +export interface ReadVideoOptions { + prompt?: string; + model?: "gemini" | "gpt4o"; + outputMd?: boolean; +} + +export interface ReadVideoResult { + description: string; + model: string; + usage: Record; + mdPath?: string; +} + +/** + * Analyze a video file using AI vision models. + * + * @param videoPath - Path to the video file + * @param options - Model selection and custom prompt + * @returns Analysis description, model used, and usage stats + */ +export async function readVideo( + videoPath: string, + options: ReadVideoOptions = {}, +): Promise { + const { prompt = DEFAULT_PROMPT, model = "gemini", outputMd = true } = options; + + if (!fs.existsSync(videoPath)) { + throw new Error(`Video file not found: ${videoPath}`); + } + + const result = + model === "gpt4o" + ? 
await readVideoWithGpt4o(videoPath, prompt) + : await readVideoWithGemini(videoPath, prompt); + + if (outputMd) { + const mdPath = videoPath.replace(/\.[^.]+$/, "") + ".md"; + const mdContent = `# Video Analysis\n\n**Source**: \`${path.basename(videoPath)}\`\n**Model**: ${result.model}\n**Usage**: ${JSON.stringify(result.usage)}\n\n---\n\n${result.description}\n`; + fs.writeFileSync(mdPath, mdContent); + result.mdPath = mdPath; + } + + return result; +} + +/** + * Analyze video natively with Gemini 2.5 Flash (supports video/* inline data) + */ +async function readVideoWithGemini(videoPath: string, prompt: string): Promise { + const apiKey = process.env.GEMINI_API_KEY; + if (!apiKey) throw new Error("GEMINI_API_KEY environment variable is required"); + + const genAI = new GoogleGenerativeAI(apiKey); + const model = genAI.getGenerativeModel({ model: "gemini-2.5-flash" }); + + const videoBuffer = fs.readFileSync(videoPath); + const base64Video = videoBuffer.toString("base64"); + const mimeType = getMimeType(videoPath); + + const result = await model.generateContent([ + { text: prompt }, + { + inlineData: { + mimeType, + data: base64Video, + }, + }, + ]); + + const response = result.response; + const text = response.text(); + const usageMetadata = response.usageMetadata; + + return { + description: text, + model: "gemini-2.5-flash", + usage: { + promptTokens: usageMetadata?.promptTokenCount, + candidateTokens: usageMetadata?.candidatesTokenCount, + totalTokens: usageMetadata?.totalTokenCount, + }, + }; +} + +/** + * Analyze video with GPT-4o by extracting frames with ffmpeg + */ +async function readVideoWithGpt4o(videoPath: string, prompt: string): Promise { + const openai = new OpenAI(); + const frames = await extractFrames(videoPath); + + if (frames.length === 0) { + throw new Error("No frames could be extracted from the video. 
Is ffmpeg installed?"); + } + + const imageMessages: OpenAI.Chat.Completions.ChatCompletionContentPart[] = frames.map( + (frame) => ({ + type: "image_url" as const, + image_url: { + url: `data:image/jpeg;base64,${frame}`, + detail: "low" as const, + }, + }), + ); + + const result = await openai.chat.completions.create({ + model: "gpt-4o", + messages: [ + { + role: "user", + content: [ + { + type: "text", + text: `${prompt}\n\n(${frames.length} frames extracted at 1fps, sampled every other frame)`, + }, + ...imageMessages, + ], + }, + ], + max_tokens: 4096, + }); + + return { + description: result.choices[0].message.content || "", + model: "gpt-4o", + usage: { + promptTokens: result.usage?.prompt_tokens, + completionTokens: result.usage?.completion_tokens, + totalTokens: result.usage?.total_tokens, + framesUsed: frames.length, + }, + }; +} + +/** + * Extract frames from video at 1fps using ffmpeg, then sample every other frame + */ +async function extractFrames(videoPath: string): Promise { + const tmpDir = path.join(path.dirname(videoPath), `.frames-${Date.now()}`); + fs.mkdirSync(tmpDir, { recursive: true }); + + try { + const { $ } = await import("bun"); + // Extract at 1 fps + await $`ffmpeg -i ${videoPath} -vf "fps=1" -q:v 2 ${tmpDir}/frame-%04d.jpg -loglevel error`.quiet(); + + // Read all frames + const frameFiles = fs + .readdirSync(tmpDir) + .filter((f) => f.endsWith(".jpg")) + .sort(); + + // Sample every other frame to reduce token usage + const sampledFiles = frameFiles.filter((_, i) => i % 2 === 0); + + const frames: string[] = []; + for (const file of sampledFiles) { + const framePath = path.join(tmpDir, file); + const buffer = fs.readFileSync(framePath); + frames.push(buffer.toString("base64")); + } + + return frames; + } finally { + // Cleanup temp frames + fs.rmSync(tmpDir, { recursive: true, force: true }); + } +} + +function getMimeType(filePath: string): string { + const ext = path.extname(filePath).toLowerCase(); + const mimeMap: Record = { + 
".mp4": "video/mp4", + ".webm": "video/webm", + ".mov": "video/quicktime", + ".avi": "video/x-msvideo", + ".mkv": "video/x-matroska", + ".m4v": "video/mp4", + }; + return mimeMap[ext] || "video/mp4"; +} + +// CLI interface +if (import.meta.main) { + const { values } = parseArgs({ + args: Bun.argv.slice(2), + options: { + file: { type: "string", short: "f" }, + model: { type: "string", short: "m" }, + prompt: { type: "string", short: "p" }, + }, + strict: false, + }); + + if (!values.file) { + console.error( + "Usage: bun lib/video/read-video.ts -f [-m gemini|gpt4o] [-p ]", + ); + process.exit(1); + } + + const result = await readVideo(values.file as string, { + model: (values.model as "gemini" | "gpt4o") || "gemini", + prompt: values.prompt as string | undefined, + }); + + console.log(`Model: ${result.model}`); + console.log(`Usage: ${JSON.stringify(result.usage)}`); + console.log(`\n${result.description}`); +} diff --git a/lib/zChat.ts b/lib/zChat.ts new file mode 100644 index 00000000..ecf1972b --- /dev/null +++ b/lib/zChat.ts @@ -0,0 +1,14 @@ +import type { ZodObject, ZodRawShape } from "zod/v4"; +import { z } from "zod/v4"; + +// Re-export zChatCompletion with simplified options type to avoid TS2589 +// ("Type instantiation is excessively deep") caused by Partial +let _mod: { default: Function } | undefined; + +export default function zChatCompletion( + schema: S | ZodObject, + options?: { model?: string; [key: string]: unknown }, +): (strings: TemplateStringsArray, ...values: unknown[]) => Promise>> { + _mod ??= require("z-chat-completion") as { default: Function }; + return _mod.default(schema, options); +} diff --git a/package.json b/package.json index 228dfb1c..9e1d1eaa 100644 --- a/package.json +++ b/package.json @@ -54,6 +54,7 @@ "@ai-sdk/openai": "^3.0.19", "@auth/mongodb-adapter": "^3.4.2", "@ctrl/mac-address": "^3.0.3", + "@google/generative-ai": "^0.24.1", "@hookform/resolvers": "^5.1.1", "@kbox-labs/react-echarts": "^1.2.0", "@keyv/mongo": "^3.0.5", 
@@ -232,7 +233,7 @@ "oxlint --fix", "oxfmt --write" ], - "*": [ + "*.{json,css,md,yaml,yml,html}": [ "oxfmt --write" ] }, diff --git a/packages/mongodb-pipeline-ts/$pipeline.ts b/packages/mongodb-pipeline-ts/$pipeline.ts index 57c056f9..a1fa50d8 100644 --- a/packages/mongodb-pipeline-ts/$pipeline.ts +++ b/packages/mongodb-pipeline-ts/$pipeline.ts @@ -194,14 +194,14 @@ type Stages = { * $unset is an alias for $project stage that removes fields. */ unset(i: I): Pipeline>; /** Deconstructs an array field from the input documents to output a document for each element. Each output document replaces the array with an element value. For each input document, outputs n documents where n is the number of array elements and can be zero for an empty array. */ - unwind

>( + unwind

>( i: | `$${P}` | { path: `$${P}`; preserveNullAndEmptyArrays?: boolean; }, - ): Pipeline, FieldArrayPathValue[number]>>; + ): Pipeline, _FieldArrayPathValue[number]>>; /** Performs an ANN search on a vector in the specified field of an Atlas collection. * New in version 7.0.2. */ diff --git a/scripts/migrate-cnrepos-trim-data.ts b/scripts/migrate-cnrepos-trim-data.ts index 94e096bd..5747e6d1 100644 --- a/scripts/migrate-cnrepos-trim-data.ts +++ b/scripts/migrate-cnrepos-trim-data.ts @@ -107,7 +107,11 @@ function trimPullsArray( function trimCrPullsArray( crPulls: - | { data?: Array<{ pull?: Record; [key: string]: unknown }>; state?: string; mtime?: Date } + | { + data?: Array<{ pull?: Record; [key: string]: unknown }>; + state?: string; + mtime?: Date; + } | undefined, ) { if (!crPulls?.data) return crPulls; @@ -126,9 +130,7 @@ function trimCrPullsArray( body: c.body, updated_at: c.updated_at, created_at: c.created_at, - user: c.user - ? { login: (c.user as { login?: string }).login } - : undefined, + user: c.user ? 
{ login: (c.user as { login?: string }).login } : undefined, })) : comments.data; // Preserve undefined return { diff --git a/src/EmailTasks.ts b/src/EmailTasks.ts index a4c0d850..075a56e1 100644 --- a/src/EmailTasks.ts +++ b/src/EmailTasks.ts @@ -87,7 +87,7 @@ export async function updateEmailTasks() { export async function sendEmailTask({ _id, state: _state, - name: _name, + name, from, to, subject, diff --git a/src/analyzePullsStatus.ts b/src/analyzePullsStatus.ts index f0448a24..bc2a29a1 100644 --- a/src/analyzePullsStatus.ts +++ b/src/analyzePullsStatus.ts @@ -50,19 +50,27 @@ export async function analyzePullsStatus({ .skip(skip) .limit(limit || 2 ** 31 - 1) .aggregate() - .map(({ updated_at, created_at: _created_at, actived_at, on_registry_at: _on_registry_at, ...pull }) => { - const pull_updated = - prettyMs(+new Date() - +new Date(updated_at), { compact: true }) + " ago"; - const repo_updated = - prettyMs(+new Date() - +new Date(actived_at), { compact: true }) + " ago"; - return { - updated: pull_updated, // @deprecated - pull_updated, - repo_updated, - ...pull, - lastcomment: pull.lastcomment?.replace(/\s+/g, " ").replace(/\*\*\*.*/g, "..."), - }; - }) + .map( + ({ + updated_at, + created_at: _created_at, + actived_at, + on_registry_at: _on_registry_at, + ...pull + }) => { + const pull_updated = + prettyMs(+new Date() - +new Date(updated_at), { compact: true }) + " ago"; + const repo_updated = + prettyMs(+new Date() - +new Date(actived_at), { compact: true }) + " ago"; + return { + updated: pull_updated, // @deprecated + pull_updated, + repo_updated, + ...pull, + lastcomment: pull.lastcomment?.replace(/\s+/g, " ").replace(/\*\*\*.*/g, "..."), + }; + }, + ) .toArray(); } export function baseCRPullStatusPipeline(): Pipeline< diff --git a/src/createGithubPullRequest.ts b/src/createGithubPullRequest.ts index 09d7d2ef..d6626e4d 100644 --- a/src/createGithubPullRequest.ts +++ b/src/createGithubPullRequest.ts @@ -100,9 +100,7 @@ export async function 
createGithubPullRequest({ }) ).data.filter((e) => e.title === title && e.body === body); - // // TODO: seems has bugs on head_repo - - if (sameContentPRList.length > 1) { + sameContentPRList.length <= 1 || DIE( new Error(`expect <= 1 same content pr, but got ${sameContentPRList.length}`, { cause: { @@ -110,7 +108,6 @@ export async function createGithubPullRequest({ }, }), ); - } const pr_result = // existedList[0] ?? diff --git a/src/test/mockDb.ts b/src/test/mockDb.ts index 93bcace0..7475522d 100644 --- a/src/test/mockDb.ts +++ b/src/test/mockDb.ts @@ -78,7 +78,7 @@ function createMockCollection(collectionName: string) { } if (existing) { - const updated = { ...existing.doc as object, ...update.$set }; + const updated = { ...(existing.doc as object), ...update.$set }; docs.set(existing.id, updated); return updated; } else if (options?.upsert) { diff --git a/src/updateAuthorsForGithub.ts b/src/updateAuthorsForGithub.ts index 3737b994..a821adff 100644 --- a/src/updateAuthorsForGithub.ts +++ b/src/updateAuthorsForGithub.ts @@ -36,25 +36,36 @@ export async function updateAuthorsForGithub() { ) .map((e) => TaskDataOrNull(e)) .filter() - .map(({ email, avatar_url, blog, updated_at: _updated_at, location, company, hireable, bio, login }) => - Authors.findOneAndUpdate( - { githubId: login }, - { - $set: { - githubMtime: new Date(), - ...(email && { email }), - ...(null != hireable && { hireable }), + .map( + ({ + email, + avatar_url, + blog, + updated_at: _updated_at, + location, + company, + hireable, + bio, + login, + }) => + Authors.findOneAndUpdate( + { githubId: login }, + { + $set: { + githubMtime: new Date(), + ...(email && { email }), + ...(null != hireable && { hireable }), + }, + $addToSet: { + ...(bio && { bios: bio }), + avatars: avatar_url, + ...(location && { locations: location }), + ...(blog && { blogs: blog }), + ...(company && { companies: company }), + }, }, - $addToSet: { - ...(bio && { bios: bio }), - avatars: avatar_url, - ...(location && { 
locations: location }), - ...(blog && { blogs: blog }), - ...(company && { companies: company }), - }, - }, - { upsert: true, returnDocument: "after" }, - ), + { upsert: true, returnDocument: "after" }, + ), ) .forEach(peekYaml) .done();