From 3b04e0476b1ce63fc0f7c83a3661b5624c62ab44 Mon Sep 17 00:00:00 2001 From: Vlad Ilyushchenko Date: Thu, 29 Jan 2026 19:56:18 +0000 Subject: [PATCH 1/7] feat(docs): add RSS feed and changelog page for documentation updates Add automatic RSS feed generation and a changelog page that displays recently updated documentation pages based on git commit history. - Add docs-rss plugin that generates RSS feed from git history - Add /docs/changelog page showing 20 most recently updated pages - Add RSS autodiscovery link in HTML head - Support `changelog: false` frontmatter to exclude pages - Respect custom slug frontmatter for URL generation Co-Authored-By: Claude Opus 4.5 --- .gitignore | 1 + documentation/changelog.mdx | 15 ++ documentation/sidebars.js | 9 ++ docusaurus.config.js | 12 ++ plugins/docs-rss/index.js | 242 +++++++++++++++++++++++++++++ src/components/Changelog/index.tsx | 120 ++++++++++++++ 6 files changed, 399 insertions(+) create mode 100644 documentation/changelog.mdx create mode 100644 plugins/docs-rss/index.js create mode 100644 src/components/Changelog/index.tsx diff --git a/.gitignore b/.gitignore index 5982b35a6..1cfcfb76d 100644 --- a/.gitignore +++ b/.gitignore @@ -17,6 +17,7 @@ plugins/*/compiled .cache-loader static/llms.txt static/reference-full.md +static/rss.xml static/web-console/*.json # Files generated by script validate_queries.py diff --git a/documentation/changelog.mdx b/documentation/changelog.mdx new file mode 100644 index 000000000..daf07da80 --- /dev/null +++ b/documentation/changelog.mdx @@ -0,0 +1,15 @@ +--- +title: Documentation Changelog +description: Recently updated documentation pages for QuestDB +sidebar_label: Changelog +changelog: false +--- + +import Changelog from "@site/src/components/Changelog" + +# Documentation Changelog + +This page lists the most recently updated documentation pages, helping you stay +informed about new content and improvements. 
+
+<Changelog />
diff --git a/documentation/sidebars.js b/documentation/sidebars.js
index 020a9c6f2..5bdbc9b80 100644
--- a/documentation/sidebars.js
+++ b/documentation/sidebars.js
@@ -911,5 +911,14 @@ module.exports = {
       type: "link",
       href: "https://questdb.com/release-notes",
     },
+
+    // ===================
+    // CHANGELOG
+    // ===================
+    {
+      id: "changelog",
+      type: "doc",
+      label: "Documentation Changelog",
+    },
   ].filter(Boolean),
 }
diff --git a/docusaurus.config.js b/docusaurus.config.js
index cadce8bd4..3231d9acf 100644
--- a/docusaurus.config.js
+++ b/docusaurus.config.js
@@ -41,6 +41,17 @@ const config = {
       crossorigin: "anonymous",
     },
   ],
+  headTags: [
+    {
+      tagName: "link",
+      attributes: {
+        rel: "alternate",
+        type: "application/rss+xml",
+        title: "QuestDB Documentation RSS Feed",
+        href: "/docs/rss.xml",
+      },
+    },
+  ],
   scripts: [
     {
       src: "https://widget.kapa.ai/kapa-widget.bundle.js",
@@ -161,6 +172,7 @@ const config = {
     require.resolve("./plugins/raw-markdown/index"),
     require.resolve("./plugins/tailwind/index"),
 
+    require.resolve("./plugins/docs-rss/index"),
     [
       "@docusaurus/plugin-pwa",
       {
diff --git a/plugins/docs-rss/index.js b/plugins/docs-rss/index.js
new file mode 100644
index 000000000..6126281c3
--- /dev/null
+++ b/plugins/docs-rss/index.js
@@ -0,0 +1,242 @@
+const fs = require("fs")
+const path = require("path")
+const { execSync } = require("child_process")
+const matter = require("gray-matter")
+
+const FEED_ITEMS_COUNT = 20
+
+/**
+ * Get the last git commit date for a file
+ */
+function getGitLastModified(filePath) {
+  try {
+    const timestamp = execSync(
+      `git log -1 --format=%cI -- "${filePath}"`,
+      { encoding: "utf-8", stdio: ["pipe", "pipe", "ignore"] }
+    ).trim()
+    return timestamp ? new Date(timestamp) : null
+  } catch {
+    return null
+  }
+}
+
+/**
+ * Extract excerpt from markdown content
+ */
+function extractExcerpt(content, maxLength = 200) {
+  // Remove MDX imports and components
+  let text = content
+    .replace(/^import\s+.*$/gm, "")
+    .replace(/<[^>]+>/g, "")
+    // Remove code blocks
+    .replace(/```[\s\S]*?```/g, "")
+    // Remove inline code
+    .replace(/`[^`]+`/g, "")
+    // Remove headers
+    .replace(/^#{1,6}\s+.*$/gm, "")
+    // Remove links but keep text
+    .replace(/\[([^\]]+)\]\([^)]+\)/g, "$1")
+    // Remove images
+    .replace(/!\[[^\]]*\]\([^)]+\)/g, "")
+    // Remove admonitions
+    .replace(/^:::\w+[\s\S]*?^:::/gm, "")
+    // Remove HTML comments
+    .replace(/<!--[\s\S]*?-->/g, "")
+    // Normalize whitespace
+    .replace(/\s+/g, " ")
+    .trim()
+
+  if (text.length > maxLength) {
+    text = text.substring(0, maxLength).replace(/\s+\S*$/, "") + "..."
+  }
+
+  return text
+}
+
+/**
+ * Escape XML special characters
+ */
+function escapeXml(str) {
+  if (!str) return ""
+  return str
+    .replace(/&/g, "&amp;")
+    .replace(/</g, "&lt;")
+    .replace(/>/g, "&gt;")
+    .replace(/"/g, "&quot;")
+    .replace(/'/g, "&#39;")
+}
+
+/**
+ * Recursively get all markdown files
+ */
+function getAllMarkdownFiles(dir, files = []) {
+  const entries = fs.readdirSync(dir, { withFileTypes: true })
+
+  for (const entry of entries) {
+    const fullPath = path.join(dir, entry.name)
+
+    if (entry.isDirectory()) {
+      getAllMarkdownFiles(fullPath, files)
+    } else if (
+      (entry.name.endsWith(".md") || entry.name.endsWith(".mdx")) &&
+      !entry.name.includes(".partial.")
+    ) {
+      files.push(fullPath)
+    }
+  }
+
+  return files
+}
+
+/**
+ * Generate RSS XML
+ */
+function generateRssXml(items, siteConfig) {
+  const siteUrl = siteConfig.url + siteConfig.baseUrl
+  const now = new Date().toUTCString()
+
+  const itemsXml = items
+    .map(
+      (item) => `    <item>
+      <title>${escapeXml(item.title)}</title>
+      <link>${escapeXml(item.url)}</link>
+      <guid isPermaLink="true">${escapeXml(item.url)}</guid>
+      <pubDate>${item.date.toUTCString()}</pubDate>
+      <description>${escapeXml(item.excerpt)}</description>
+    </item>`
+    )
+    .join("\n")
+
+  return `<?xml version="1.0" encoding="UTF-8"?>
+<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
+  <channel>
+    <title>${escapeXml(siteConfig.title)} Documentation</title>
+    <link>${escapeXml(siteUrl)}</link>
+    <description>${escapeXml(siteConfig.tagline)}</description>
+    <language>en</language>
+    <lastBuildDate>${now}</lastBuildDate>
+    <atom:link href="${escapeXml(siteUrl)}rss.xml" rel="self" type="application/rss+xml" />
+${itemsXml}
+  </channel>
+</rss>`
+}
+
+/**
+ * Generate RSS feed items from documentation
+ */
+function generateFeedItems(docsDir, siteConfig) {
+  const markdownFiles = getAllMarkdownFiles(docsDir)
+  const items = []
+
+  for (const filePath of markdownFiles) {
+    try {
+      const fileContent = fs.readFileSync(filePath, "utf-8")
+      const { data: frontmatter, content } = matter(fileContent)
+
+      // Skip drafts and pages excluded from changelog
+      if (frontmatter.draft === true || frontmatter.changelog === false) {
+        continue
+      }
+
+      // Get title from frontmatter or first heading
+      let title = frontmatter.title
+      if (!title) {
+        const headingMatch = content.match(/^#\s+(.+)$/m)
+        title = headingMatch ? headingMatch[1] : path.basename(filePath, path.extname(filePath))
+      }
+
+      // Get description from frontmatter or extract from content
+      const excerpt = frontmatter.description || extractExcerpt(content)
+
+      // Get last modified date from git
+      const lastModified = getGitLastModified(filePath)
+      if (!lastModified) {
+        continue
+      }
+
+      // Build URL from file path, respecting custom slug if defined
+      const relativePath = path.relative(docsDir, filePath).replace(/\\/g, "/")
+      const dirPath = path.dirname(relativePath)
+
+      let urlPath
+      if (frontmatter.slug) {
+        if (frontmatter.slug.startsWith("/")) {
+          // Absolute slug from docs root
+          urlPath = frontmatter.slug.slice(1)
+        } else {
+          // Relative slug within the doc's directory
+          urlPath = dirPath === "."
+            ? frontmatter.slug
+            : `${dirPath}/${frontmatter.slug}`
+        }
+      } else {
+        // Build from file path
+        urlPath = relativePath
+          .replace(/\.mdx?$/, "")
+          .replace(/\/index$/, "")
+      }
+
+      // Ensure trailing slash
+      if (urlPath && !urlPath.endsWith("/")) {
+        urlPath += "/"
+      }
+
+      const baseUrl = siteConfig.baseUrl.endsWith("/")
+        ?
siteConfig.baseUrl + : siteConfig.baseUrl + "/" + const url = siteConfig.url + baseUrl + urlPath + + items.push({ + title, + url, + date: lastModified, + excerpt, + }) + } catch (err) { + console.warn(`[docs-rss] Error processing ${filePath}:`, err.message) + } + } + + // Sort by date descending and take top N + items.sort((a, b) => b.date - a.date) + return items.slice(0, FEED_ITEMS_COUNT) +} + +module.exports = function docsRssPlugin(context) { + return { + name: "docs-rss", + + // Generate RSS feed to static directory (gets copied to build output) + async loadContent() { + const { siteConfig } = context + const docsDir = path.join(context.siteDir, "documentation") + const staticDir = path.join(context.siteDir, "static") + + if (!fs.existsSync(docsDir)) { + console.warn("[docs-rss] Documentation directory not found") + return [] + } + + console.log("[docs-rss] Generating RSS feed...") + + const recentItems = generateFeedItems(docsDir, siteConfig) + const rssXml = generateRssXml(recentItems, siteConfig) + const rssPath = path.join(staticDir, "rss.xml") + fs.writeFileSync(rssPath, rssXml, "utf-8") + + console.log(`[docs-rss] Generated RSS feed with ${recentItems.length} items`) + + // Return items for use in contentLoaded + return recentItems.map((item) => ({ + ...item, + date: item.date.toISOString(), + })) + }, + + // Expose changelog data as global data + async contentLoaded({ content, actions }) { + const { setGlobalData } = actions + setGlobalData({ changelog: content || [] }) + }, + } +} diff --git a/src/components/Changelog/index.tsx b/src/components/Changelog/index.tsx new file mode 100644 index 000000000..a6a3151dc --- /dev/null +++ b/src/components/Changelog/index.tsx @@ -0,0 +1,120 @@ +import { usePluginData } from "@docusaurus/useGlobalData" +import Link from "@docusaurus/Link" + +function RssIcon({ className }: { className?: string }) { + return ( + + + + + ) +} + +type ChangelogItem = { + title: string + url: string + date: string + excerpt: string +} + +type ChangelogData = { + changelog: ChangelogItem[] +} + +function formatDate(dateString: string): string { + const date = new Date(dateString) + return date.toLocaleDateString("en-US", { + year: "numeric", + month: "long", + day: "numeric", + timeZone: "UTC", + }) +} + +function getRelativeUrl(fullUrl: string): string { + // Extract path from full URL for internal linking + try { + const url = new URL(fullUrl) + return url.pathname + } catch { + return fullUrl + } +} + +function groupByDate(items: ChangelogItem[]): Map { + const groups = new Map() + + for (const item of items) { + const date = new Date(item.date) + const dateKey = date.toISOString().split("T")[0] + + if (!groups.has(dateKey)) { + groups.set(dateKey, []) + } + groups.get(dateKey)!.push(item) + } + + return groups +} + +export default function Changelog() { + const data = usePluginData("docs-rss") as ChangelogData | undefined + + if (!data?.changelog?.length) { + return ( +
+      <p>No recent documentation updates found.</p>
+    )
+  }
+
+  const groupedItems = groupByDate(data.changelog)
+
+  return (
+    <div>
+      <Link to="/docs/rss.xml" title="Subscribe to the documentation RSS feed"><RssIcon /></Link>
+      {Array.from(groupedItems.entries()).map(([dateKey, items]) => (
+        <section key={dateKey}>
+          <h2>
+            {formatDate(items[0].date)}
+          </h2>
+          <ul>
+            {items.map((item, index) => (
+              <li key={index}>
+                <Link to={getRelativeUrl(item.url)}>
+                  {item.title}
+                </Link>
+                {item.excerpt && (
+                  <p>
+                    {item.excerpt}
+                  </p>
+                )}
+              </li>
+            ))}
+          </ul>
+        </section>
+      ))}
+    </div>
+ ) +} From fa3902f00d3718fa0f408c21fd647c2719744184 Mon Sep 17 00:00:00 2001 From: Vlad Ilyushchenko Date: Thu, 29 Jan 2026 20:05:20 +0000 Subject: [PATCH 2/7] fix: fetch full git history for accurate changelog dates on Netlify The docs-rss plugin uses git log to determine file modification dates. Netlify's shallow clones caused all files to show the same date. Co-Authored-By: Claude Opus 4.5 --- netlify.toml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 netlify.toml diff --git a/netlify.toml b/netlify.toml new file mode 100644 index 000000000..825ae9fc6 --- /dev/null +++ b/netlify.toml @@ -0,0 +1,6 @@ +[build] + command = "git fetch --unshallow || true && yarn build" + publish = "build" + +[build.environment] + NODE_VERSION = "18" From 44e07e2e2fe3318233337b8a1c571704d7c33c32 Mon Sep 17 00:00:00 2001 From: Vlad Ilyushchenko Date: Thu, 29 Jan 2026 20:08:15 +0000 Subject: [PATCH 3/7] chore: add comment explaining git unshallow --- netlify.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/netlify.toml b/netlify.toml index 825ae9fc6..8d1193761 100644 --- a/netlify.toml +++ b/netlify.toml @@ -1,4 +1,5 @@ [build] + # Fetch full git history so docs-rss plugin can get accurate file modification dates command = "git fetch --unshallow || true && yarn build" publish = "build" From 1675cd49716e6b4563cddc9dd1edc21ec5891afa Mon Sep 17 00:00:00 2001 From: Vlad Ilyushchenko Date: Thu, 29 Jan 2026 20:12:36 +0000 Subject: [PATCH 4/7] fix: try prune unshallow or depth 500 for git history --- netlify.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/netlify.toml b/netlify.toml index 8d1193761..b30df1013 100644 --- a/netlify.toml +++ b/netlify.toml @@ -1,6 +1,7 @@ [build] # Fetch full git history so docs-rss plugin can get accurate file modification dates - command = "git fetch --unshallow || true && yarn build" + # Netlify uses shallow clones by default + command = "git fetch --prune --unshallow || git fetch --depth=500 && yarn build" publish = "build" [build.environment] From cac935ed8f0bf57197a8b2e084ca57d66ded9aae Mon Sep 17 00:00:00 2001 From: Vlad Ilyushchenko Date: Thu, 29 Jan 2026 20:21:14 +0000 Subject: [PATCH 5/7] fix: use GitHub API for file dates in shallow clones Detects shallow clone (e.g., Netlify) and fetches file modification dates from GitHub API instead of git log. Falls back to local git when full history is available. 
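For reference, the detection-and-fallback approach this commit introduces boils down to the sketch below. It is condensed from the plugin changes in this patch (the repository slug, headers, and git command mirror the diff); treat it as an illustration rather than a drop-in replacement for the plugin code.

```js
// Condensed sketch of the strategy in this patch: prefer local git history,
// but fall back to the GitHub commits API when the clone is shallow.
const { execSync } = require("child_process")

// Netlify build clones are shallow, so `git log` dates are unreliable there.
function isShallowClone() {
  try {
    return (
      execSync("git rev-parse --is-shallow-repository", { encoding: "utf-8" }).trim() === "true"
    )
  } catch {
    return false
  }
}

// Ask the GitHub commits API for the most recent commit touching a file.
async function lastModifiedFromGitHub(repoRelativePath) {
  const url =
    "https://api.github.com/repos/questdb/documentation/commits" +
    `?path=${encodeURIComponent(repoRelativePath)}&per_page=1`
  const res = await fetch(url, {
    headers: { Accept: "application/vnd.github.v3+json", "User-Agent": "questdb-docs-rss" },
  })
  if (!res.ok) return null
  const commits = await res.json()
  return commits?.[0]?.commit ? new Date(commits[0].commit.committer.date) : null
}
```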
Co-Authored-By: Claude Opus 4.5 --- netlify.toml | 4 +- plugins/docs-rss/index.js | 134 ++++++++++++++++++++++++++------------ 2 files changed, 94 insertions(+), 44 deletions(-) diff --git a/netlify.toml b/netlify.toml index b30df1013..b7602dac2 100644 --- a/netlify.toml +++ b/netlify.toml @@ -1,7 +1,5 @@ [build] - # Fetch full git history so docs-rss plugin can get accurate file modification dates - # Netlify uses shallow clones by default - command = "git fetch --prune --unshallow || git fetch --depth=500 && yarn build" + command = "yarn build" publish = "build" [build.environment] diff --git a/plugins/docs-rss/index.js b/plugins/docs-rss/index.js index 6126281c3..bad7a4669 100644 --- a/plugins/docs-rss/index.js +++ b/plugins/docs-rss/index.js @@ -4,9 +4,40 @@ const { execSync } = require("child_process") const matter = require("gray-matter") const FEED_ITEMS_COUNT = 20 +const GITHUB_REPO = "questdb/documentation" /** - * Get the last git commit date for a file + * Get the last commit date for a file using GitHub API + */ +async function getGitHubLastModified(filePath, docsDir) { + const relativePath = path.relative(path.dirname(docsDir), filePath).replace(/\\/g, "/") + const url = `https://api.github.com/repos/${GITHUB_REPO}/commits?path=${encodeURIComponent(relativePath)}&per_page=1` + + try { + const response = await fetch(url, { + headers: { + "Accept": "application/vnd.github.v3+json", + "User-Agent": "questdb-docs-rss" + } + }) + + if (!response.ok) { + return null + } + + const commits = await response.json() + if (commits && commits.length > 0 && commits[0].commit) { + return new Date(commits[0].commit.committer.date) + } + } catch (err) { + console.warn(`[docs-rss] GitHub API error for ${relativePath}:`, err.message) + } + + return null +} + +/** + * Get the last git commit date for a file (local fallback) */ function getGitLastModified(filePath) { try { @@ -24,25 +55,16 @@ function getGitLastModified(filePath) { * Extract excerpt from markdown content */ function extractExcerpt(content, maxLength = 200) { - // Remove MDX imports and components let text = content .replace(/^import\s+.*$/gm, "") .replace(/<[^>]+>/g, "") - // Remove code blocks .replace(/```[\s\S]*?```/g, "") - // Remove inline code .replace(/`[^`]+`/g, "") - // Remove headers .replace(/^#{1,6}\s+.*$/gm, "") - // Remove links but keep text .replace(/\[([^\]]+)\]\([^)]+\)/g, "$1") - // Remove images .replace(/!\[[^\]]*\]\([^)]+\)/g, "") - // Remove admonitions .replace(/^:::\w+[\s\S]*?^:::/gm, "") - // Remove HTML comments .replace(//g, "") - // Normalize whitespace .replace(/\s+/g, " ") .trim() @@ -124,59 +146,43 @@ ${itemsXml} /** * Generate RSS feed items from documentation */ -function generateFeedItems(docsDir, siteConfig) { +async function generateFeedItems(docsDir, siteConfig, useGitHubApi) { const markdownFiles = getAllMarkdownFiles(docsDir) const items = [] + // Process files to get metadata (without dates yet) + const fileData = [] for (const filePath of markdownFiles) { try { const fileContent = fs.readFileSync(filePath, "utf-8") const { data: frontmatter, content } = matter(fileContent) - // Skip drafts and pages excluded from changelog if (frontmatter.draft === true || frontmatter.changelog === false) { continue } - // Get title from frontmatter or first heading let title = frontmatter.title if (!title) { const headingMatch = content.match(/^#\s+(.+)$/m) title = headingMatch ? 
headingMatch[1] : path.basename(filePath, path.extname(filePath)) } - // Get description from frontmatter or extract from content const excerpt = frontmatter.description || extractExcerpt(content) - // Get last modified date from git - const lastModified = getGitLastModified(filePath) - if (!lastModified) { - continue - } - - // Build URL from file path, respecting custom slug if defined const relativePath = path.relative(docsDir, filePath).replace(/\\/g, "/") const dirPath = path.dirname(relativePath) let urlPath if (frontmatter.slug) { if (frontmatter.slug.startsWith("/")) { - // Absolute slug from docs root urlPath = frontmatter.slug.slice(1) } else { - // Relative slug within the doc's directory - urlPath = dirPath === "." - ? frontmatter.slug - : `${dirPath}/${frontmatter.slug}` + urlPath = dirPath === "." ? frontmatter.slug : `${dirPath}/${frontmatter.slug}` } } else { - // Build from file path - urlPath = relativePath - .replace(/\.mdx?$/, "") - .replace(/\/index$/, "") + urlPath = relativePath.replace(/\.mdx?$/, "").replace(/\/index$/, "") } - // Ensure trailing slash if (urlPath && !urlPath.endsWith("/")) { urlPath += "/" } @@ -186,27 +192,69 @@ function generateFeedItems(docsDir, siteConfig) { : siteConfig.baseUrl + "/" const url = siteConfig.url + baseUrl + urlPath - items.push({ - title, - url, - date: lastModified, - excerpt, - }) + fileData.push({ filePath, title, url, excerpt }) } catch (err) { console.warn(`[docs-rss] Error processing ${filePath}:`, err.message) } } + // Get dates - use GitHub API or git log + if (useGitHubApi) { + console.log("[docs-rss] Using GitHub API for file dates...") + // Fetch dates in parallel with concurrency limit + const CONCURRENCY = 10 + for (let i = 0; i < fileData.length; i += CONCURRENCY) { + const batch = fileData.slice(i, i + CONCURRENCY) + const dates = await Promise.all( + batch.map(f => getGitHubLastModified(f.filePath, docsDir)) + ) + batch.forEach((f, idx) => { + f.date = dates[idx] + }) + } + } else { + console.log("[docs-rss] Using local git for file dates...") + for (const f of fileData) { + f.date = getGitLastModified(f.filePath) + } + } + + // Filter out files without dates and build final items + for (const f of fileData) { + if (f.date) { + items.push({ + title: f.title, + url: f.url, + date: f.date, + excerpt: f.excerpt, + }) + } + } + // Sort by date descending and take top N items.sort((a, b) => b.date - a.date) return items.slice(0, FEED_ITEMS_COUNT) } +/** + * Detect if we're in a shallow git clone + */ +function isShallowClone() { + try { + const result = execSync("git rev-parse --is-shallow-repository", { + encoding: "utf-8", + stdio: ["pipe", "pipe", "ignore"] + }).trim() + return result === "true" + } catch { + return false + } +} + module.exports = function docsRssPlugin(context) { return { name: "docs-rss", - // Generate RSS feed to static directory (gets copied to build output) async loadContent() { const { siteConfig } = context const docsDir = path.join(context.siteDir, "documentation") @@ -219,21 +267,25 @@ module.exports = function docsRssPlugin(context) { console.log("[docs-rss] Generating RSS feed...") - const recentItems = generateFeedItems(docsDir, siteConfig) + // Use GitHub API if in shallow clone (e.g., Netlify) + const useGitHubApi = isShallowClone() + if (useGitHubApi) { + console.log("[docs-rss] Shallow clone detected, using GitHub API") + } + + const recentItems = await generateFeedItems(docsDir, siteConfig, useGitHubApi) const rssXml = generateRssXml(recentItems, siteConfig) const rssPath = 
path.join(staticDir, "rss.xml") fs.writeFileSync(rssPath, rssXml, "utf-8") console.log(`[docs-rss] Generated RSS feed with ${recentItems.length} items`) - // Return items for use in contentLoaded return recentItems.map((item) => ({ ...item, date: item.date.toISOString(), })) }, - // Expose changelog data as global data async contentLoaded({ content, actions }) { const { setGlobalData } = actions setGlobalData({ changelog: content || [] }) From ddc036d525eab637eeadfcb1b28b7c7da5486ea1 Mon Sep 17 00:00:00 2001 From: Vlad Ilyushchenko Date: Thu, 29 Jan 2026 20:31:08 +0000 Subject: [PATCH 6/7] fix: use full git clone on Netlify for accurate file dates Set GIT_CLONE_DEPTH=0 in netlify.toml for full history. Reverted plugin to simple git-only version. Co-Authored-By: Claude Opus 4.5 --- netlify.toml | 2 + plugins/docs-rss/index.js | 106 +++++--------------------------------- 2 files changed, 16 insertions(+), 92 deletions(-) diff --git a/netlify.toml b/netlify.toml index b7602dac2..7ccfb6399 100644 --- a/netlify.toml +++ b/netlify.toml @@ -4,3 +4,5 @@ [build.environment] NODE_VERSION = "18" + # Full git clone for accurate file modification dates in docs-rss plugin + GIT_CLONE_DEPTH = "0" diff --git a/plugins/docs-rss/index.js b/plugins/docs-rss/index.js index bad7a4669..ecc6bc87d 100644 --- a/plugins/docs-rss/index.js +++ b/plugins/docs-rss/index.js @@ -4,40 +4,9 @@ const { execSync } = require("child_process") const matter = require("gray-matter") const FEED_ITEMS_COUNT = 20 -const GITHUB_REPO = "questdb/documentation" /** - * Get the last commit date for a file using GitHub API - */ -async function getGitHubLastModified(filePath, docsDir) { - const relativePath = path.relative(path.dirname(docsDir), filePath).replace(/\\/g, "/") - const url = `https://api.github.com/repos/${GITHUB_REPO}/commits?path=${encodeURIComponent(relativePath)}&per_page=1` - - try { - const response = await fetch(url, { - headers: { - "Accept": "application/vnd.github.v3+json", - "User-Agent": "questdb-docs-rss" - } - }) - - if (!response.ok) { - return null - } - - const commits = await response.json() - if (commits && commits.length > 0 && commits[0].commit) { - return new Date(commits[0].commit.committer.date) - } - } catch (err) { - console.warn(`[docs-rss] GitHub API error for ${relativePath}:`, err.message) - } - - return null -} - -/** - * Get the last git commit date for a file (local fallback) + * Get the last git commit date for a file */ function getGitLastModified(filePath) { try { @@ -146,12 +115,10 @@ ${itemsXml} /** * Generate RSS feed items from documentation */ -async function generateFeedItems(docsDir, siteConfig, useGitHubApi) { +function generateFeedItems(docsDir, siteConfig) { const markdownFiles = getAllMarkdownFiles(docsDir) const items = [] - // Process files to get metadata (without dates yet) - const fileData = [] for (const filePath of markdownFiles) { try { const fileContent = fs.readFileSync(filePath, "utf-8") @@ -168,6 +135,11 @@ async function generateFeedItems(docsDir, siteConfig, useGitHubApi) { } const excerpt = frontmatter.description || extractExcerpt(content) + const lastModified = getGitLastModified(filePath) + + if (!lastModified) { + continue + } const relativePath = path.relative(docsDir, filePath).replace(/\\/g, "/") const dirPath = path.dirname(relativePath) @@ -192,65 +164,21 @@ async function generateFeedItems(docsDir, siteConfig, useGitHubApi) { : siteConfig.baseUrl + "/" const url = siteConfig.url + baseUrl + urlPath - fileData.push({ filePath, title, url, excerpt }) - } 
catch (err) { - console.warn(`[docs-rss] Error processing ${filePath}:`, err.message) - } - } - - // Get dates - use GitHub API or git log - if (useGitHubApi) { - console.log("[docs-rss] Using GitHub API for file dates...") - // Fetch dates in parallel with concurrency limit - const CONCURRENCY = 10 - for (let i = 0; i < fileData.length; i += CONCURRENCY) { - const batch = fileData.slice(i, i + CONCURRENCY) - const dates = await Promise.all( - batch.map(f => getGitHubLastModified(f.filePath, docsDir)) - ) - batch.forEach((f, idx) => { - f.date = dates[idx] - }) - } - } else { - console.log("[docs-rss] Using local git for file dates...") - for (const f of fileData) { - f.date = getGitLastModified(f.filePath) - } - } - - // Filter out files without dates and build final items - for (const f of fileData) { - if (f.date) { items.push({ - title: f.title, - url: f.url, - date: f.date, - excerpt: f.excerpt, + title, + url, + date: lastModified, + excerpt, }) + } catch (err) { + console.warn(`[docs-rss] Error processing ${filePath}:`, err.message) } } - // Sort by date descending and take top N items.sort((a, b) => b.date - a.date) return items.slice(0, FEED_ITEMS_COUNT) } -/** - * Detect if we're in a shallow git clone - */ -function isShallowClone() { - try { - const result = execSync("git rev-parse --is-shallow-repository", { - encoding: "utf-8", - stdio: ["pipe", "pipe", "ignore"] - }).trim() - return result === "true" - } catch { - return false - } -} - module.exports = function docsRssPlugin(context) { return { name: "docs-rss", @@ -267,13 +195,7 @@ module.exports = function docsRssPlugin(context) { console.log("[docs-rss] Generating RSS feed...") - // Use GitHub API if in shallow clone (e.g., Netlify) - const useGitHubApi = isShallowClone() - if (useGitHubApi) { - console.log("[docs-rss] Shallow clone detected, using GitHub API") - } - - const recentItems = await generateFeedItems(docsDir, siteConfig, useGitHubApi) + const recentItems = generateFeedItems(docsDir, siteConfig) const rssXml = generateRssXml(recentItems, siteConfig) const rssPath = path.join(staticDir, "rss.xml") fs.writeFileSync(rssPath, rssXml, "utf-8") From 76ef68a435eeb6416330d730605d736d597d8700 Mon Sep 17 00:00:00 2001 From: Vlad Ilyushchenko Date: Thu, 29 Jan 2026 20:34:34 +0000 Subject: [PATCH 7/7] fix: use GitHub API for file dates on shallow clones GIT_CLONE_DEPTH not respected by Netlify. Falls back to GitHub API when shallow clone detected. 
Co-Authored-By: Claude Opus 4.5 --- netlify.toml | 2 - plugins/docs-rss/index.js | 115 +++++++++++++++++++++++++++++++------- 2 files changed, 94 insertions(+), 23 deletions(-) diff --git a/netlify.toml b/netlify.toml index 7ccfb6399..b7602dac2 100644 --- a/netlify.toml +++ b/netlify.toml @@ -4,5 +4,3 @@ [build.environment] NODE_VERSION = "18" - # Full git clone for accurate file modification dates in docs-rss plugin - GIT_CLONE_DEPTH = "0" diff --git a/plugins/docs-rss/index.js b/plugins/docs-rss/index.js index ecc6bc87d..551893197 100644 --- a/plugins/docs-rss/index.js +++ b/plugins/docs-rss/index.js @@ -4,16 +4,61 @@ const { execSync } = require("child_process") const matter = require("gray-matter") const FEED_ITEMS_COUNT = 20 +const GITHUB_REPO = "questdb/documentation" + +/** + * Check if running in a shallow git clone + */ +function isShallowClone() { + try { + const result = execSync("git rev-parse --is-shallow-repository", { + encoding: "utf-8", + stdio: ["pipe", "pipe", "ignore"], + }).trim() + return result === "true" + } catch { + return false + } +} + +/** + * Get the last commit date for a file using GitHub API + */ +async function getGitHubLastModified(relativePath) { + const url = `https://api.github.com/repos/${GITHUB_REPO}/commits?path=${encodeURIComponent(relativePath)}&per_page=1` + + try { + const response = await fetch(url, { + headers: { + Accept: "application/vnd.github.v3+json", + "User-Agent": "questdb-docs-rss", + }, + }) + + if (!response.ok) { + return null + } + + const commits = await response.json() + if (commits?.length > 0 && commits[0].commit) { + return new Date(commits[0].commit.committer.date) + } + } catch { + // Ignore errors + } + + return null +} /** * Get the last git commit date for a file */ function getGitLastModified(filePath) { try { - const timestamp = execSync( - `git log -1 --format=%cI -- "${filePath}"`, - { encoding: "utf-8", stdio: ["pipe", "pipe", "ignore"] } - ).trim() + const timestamp = execSync(`git log -1 --format=%cI -- "${filePath}"`, { + encoding: "utf-8", + stdio: ["pipe", "pipe", "ignore"], + }).trim() return timestamp ? new Date(timestamp) : null } catch { return null @@ -115,10 +160,11 @@ ${itemsXml} /** * Generate RSS feed items from documentation */ -function generateFeedItems(docsDir, siteConfig) { +async function generateFeedItems(docsDir, siteConfig, useGitHubApi) { const markdownFiles = getAllMarkdownFiles(docsDir) - const items = [] + const fileData = [] + // Collect file metadata for (const filePath of markdownFiles) { try { const fileContent = fs.readFileSync(filePath, "utf-8") @@ -131,17 +177,14 @@ function generateFeedItems(docsDir, siteConfig) { let title = frontmatter.title if (!title) { const headingMatch = content.match(/^#\s+(.+)$/m) - title = headingMatch ? headingMatch[1] : path.basename(filePath, path.extname(filePath)) + title = headingMatch + ? headingMatch[1] + : path.basename(filePath, path.extname(filePath)) } const excerpt = frontmatter.description || extractExcerpt(content) - const lastModified = getGitLastModified(filePath) - - if (!lastModified) { - continue - } - const relativePath = path.relative(docsDir, filePath).replace(/\\/g, "/") + const repoPath = "documentation/" + relativePath const dirPath = path.dirname(relativePath) let urlPath @@ -149,7 +192,8 @@ function generateFeedItems(docsDir, siteConfig) { if (frontmatter.slug.startsWith("/")) { urlPath = frontmatter.slug.slice(1) } else { - urlPath = dirPath === "." ? 
frontmatter.slug : `${dirPath}/${frontmatter.slug}` + urlPath = + dirPath === "." ? frontmatter.slug : `${dirPath}/${frontmatter.slug}` } } else { urlPath = relativePath.replace(/\.mdx?$/, "").replace(/\/index$/, "") @@ -164,17 +208,41 @@ function generateFeedItems(docsDir, siteConfig) { : siteConfig.baseUrl + "/" const url = siteConfig.url + baseUrl + urlPath - items.push({ - title, - url, - date: lastModified, - excerpt, - }) + fileData.push({ filePath, repoPath, title, url, excerpt }) } catch (err) { console.warn(`[docs-rss] Error processing ${filePath}:`, err.message) } } + // Get dates + if (useGitHubApi) { + console.log("[docs-rss] Using GitHub API for file dates...") + const CONCURRENCY = 10 + for (let i = 0; i < fileData.length; i += CONCURRENCY) { + const batch = fileData.slice(i, i + CONCURRENCY) + const dates = await Promise.all( + batch.map((f) => getGitHubLastModified(f.repoPath)) + ) + batch.forEach((f, idx) => { + f.date = dates[idx] + }) + } + } else { + for (const f of fileData) { + f.date = getGitLastModified(f.filePath) + } + } + + // Build final items + const items = fileData + .filter((f) => f.date) + .map((f) => ({ + title: f.title, + url: f.url, + date: f.date, + excerpt: f.excerpt, + })) + items.sort((a, b) => b.date - a.date) return items.slice(0, FEED_ITEMS_COUNT) } @@ -195,7 +263,12 @@ module.exports = function docsRssPlugin(context) { console.log("[docs-rss] Generating RSS feed...") - const recentItems = generateFeedItems(docsDir, siteConfig) + const useGitHubApi = isShallowClone() + if (useGitHubApi) { + console.log("[docs-rss] Shallow clone detected, using GitHub API") + } + + const recentItems = await generateFeedItems(docsDir, siteConfig, useGitHubApi) const rssXml = generateRssXml(recentItems, siteConfig) const rssPath = path.join(staticDir, "rss.xml") fs.writeFileSync(rssPath, rssXml, "utf-8")
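Once any of these variants has run as part of a build, a quick way to eyeball the generated feed locally is a few lines of Node. This is a minimal sketch: it assumes `static/rss.xml` has already been written by the plugin (for example via `yarn build`) and simply lists item titles with their publication dates.

```js
// List <title>/<pubDate> pairs from the generated RSS feed.
// Assumes the docs-rss plugin has already written static/rss.xml.
const fs = require("fs")

const xml = fs.readFileSync("static/rss.xml", "utf-8")
const itemPattern =
  /<item>[\s\S]*?<title>([\s\S]*?)<\/title>[\s\S]*?<pubDate>([\s\S]*?)<\/pubDate>[\s\S]*?<\/item>/g

for (const [, title, pubDate] of xml.matchAll(itemPattern)) {
  console.log(`${pubDate}  ${title}`)
}
```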