1 change: 1 addition & 0 deletions .gitignore
@@ -17,6 +17,7 @@ plugins/*/compiled
.cache-loader
static/llms.txt
static/reference-full.md
static/rss.xml
static/web-console/*.json

# Files generated by script validate_queries.py
15 changes: 15 additions & 0 deletions documentation/changelog.mdx
@@ -0,0 +1,15 @@
---
title: Documentation Changelog
description: Recently updated documentation pages for QuestDB
sidebar_label: Changelog
changelog: false
---

import Changelog from "@site/src/components/Changelog"

# Documentation Changelog

This page lists the most recently updated documentation pages, helping you stay
informed about new content and improvements.

<Changelog />
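
Note on the `<Changelog />` component: it is imported from `@site/src/components/Changelog` but is not part of this diff. A minimal sketch of how such a component could consume the plugin's global data (published via `setGlobalData` in `plugins/docs-rss/index.js` below) might look like the following; the file path and markup here are assumptions, not the actual implementation:

// src/components/Changelog/index.js -- hypothetical sketch, not part of this PR
import React from "react"
import { usePluginData } from "@docusaurus/useGlobalData"

export default function Changelog() {
  // "docs-rss" matches the plugin name; `changelog` is set in contentLoaded()
  const { changelog = [] } = usePluginData("docs-rss")
  return (
    <ul>
      {changelog.map((item) => (
        <li key={item.url}>
          <a href={item.url}>{item.title}</a>{" "}
          <small>{new Date(item.date).toLocaleDateString()}</small>
          <p>{item.excerpt}</p>
        </li>
      ))}
    </ul>
  )
}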
9 changes: 9 additions & 0 deletions documentation/sidebars.js
@@ -911,5 +911,14 @@ module.exports = {
type: "link",
href: "https://questdb.com/release-notes",
},

// ===================
// CHANGELOG
// ===================
{
id: "changelog",
type: "doc",
label: "Documentation Changelog",
},
].filter(Boolean),
}
12 changes: 12 additions & 0 deletions docusaurus.config.js
@@ -41,6 +41,17 @@ const config = {
crossorigin: "anonymous",
},
],
headTags: [
{
tagName: "link",
attributes: {
rel: "alternate",
type: "application/rss+xml",
title: "QuestDB Documentation RSS Feed",
href: "/docs/rss.xml",
},
},
],
scripts: [
{
src: "https://widget.kapa.ai/kapa-widget.bundle.js",
@@ -161,6 +172,7 @@ const config = {
require.resolve("./plugins/raw-markdown/index"),

require.resolve("./plugins/tailwind/index"),
require.resolve("./plugins/docs-rss/index"),
[
"@docusaurus/plugin-pwa",
{
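
With the headTags entry above, every rendered page should carry a feed-discovery link in its <head>, roughly:

<link rel="alternate" type="application/rss+xml" title="QuestDB Documentation RSS Feed" href="/docs/rss.xml">

which is what lets feed readers auto-discover the feed at /docs/rss.xml.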
6 changes: 6 additions & 0 deletions netlify.toml
@@ -0,0 +1,6 @@
[build]
command = "yarn build"
publish = "build"

[build.environment]
NODE_VERSION = "18"
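
Worth noting: pinning NODE_VERSION to 18 also guarantees the global fetch API that the docs-rss plugin below uses for its GitHub API fallback, since fetch is only built into Node from version 18 onward. A quick local sanity check (command illustrative):

node -e "fetch('https://api.github.com/rate_limit').then((r) => console.log(r.status))"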
289 changes: 289 additions & 0 deletions plugins/docs-rss/index.js
@@ -0,0 +1,289 @@
const fs = require("fs")
const path = require("path")
const { execSync } = require("child_process")
const matter = require("gray-matter")

const FEED_ITEMS_COUNT = 20
const GITHUB_REPO = "questdb/documentation"

/**
* Check if running in a shallow git clone
*/
function isShallowClone() {
try {
const result = execSync("git rev-parse --is-shallow-repository", {
encoding: "utf-8",
stdio: ["pipe", "pipe", "ignore"],
}).trim()
return result === "true"
} catch {
return false
}
}

/**
* Get the last commit date for a file using GitHub API
*/
async function getGitHubLastModified(relativePath) {
const url = `https://api.github.com/repos/${GITHUB_REPO}/commits?path=${encodeURIComponent(relativePath)}&per_page=1`

try {
const response = await fetch(url, {
headers: {
Accept: "application/vnd.github.v3+json",
"User-Agent": "questdb-docs-rss",
},
})

if (!response.ok) {
return null
}

const commits = await response.json()
if (commits?.length > 0 && commits[0].commit) {
return new Date(commits[0].commit.committer.date)
}
} catch {
// Ignore errors
}

return null
}

/**
* Get the last git commit date for a file
*/
function getGitLastModified(filePath) {
try {
const timestamp = execSync(`git log -1 --format=%cI -- "${filePath}"`, {
encoding: "utf-8",
stdio: ["pipe", "pipe", "ignore"],
}).trim()
return timestamp ? new Date(timestamp) : null
} catch {
return null
}
}

/**
* Extract excerpt from markdown content
*/
function extractExcerpt(content, maxLength = 200) {
  let text = content
    .replace(/^import\s+.*$/gm, "")
    .replace(/<!--[\s\S]*?-->/g, "")
    .replace(/<[^>]+>/g, "")
    .replace(/```[\s\S]*?```/g, "")
    .replace(/`[^`]+`/g, "")
    .replace(/^#{1,6}\s+.*$/gm, "")
    .replace(/!\[[^\]]*\]\([^)]+\)/g, "")
    .replace(/\[([^\]]+)\]\([^)]+\)/g, "$1")
    .replace(/^:::\w+[\s\S]*?^:::/gm, "")
    .replace(/\s+/g, " ")
    .trim()

if (text.length > maxLength) {
text = text.substring(0, maxLength).replace(/\s+\S*$/, "") + "..."
}

return text
}

/**
* Escape XML special characters
*/
function escapeXml(str) {
if (!str) return ""
return str
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;")
.replace(/'/g, "&apos;")
}

/**
* Recursively get all markdown files
*/
function getAllMarkdownFiles(dir, files = []) {
const entries = fs.readdirSync(dir, { withFileTypes: true })

for (const entry of entries) {
const fullPath = path.join(dir, entry.name)

if (entry.isDirectory()) {
getAllMarkdownFiles(fullPath, files)
} else if (
(entry.name.endsWith(".md") || entry.name.endsWith(".mdx")) &&
!entry.name.includes(".partial.")
) {
files.push(fullPath)
}
}

return files
}

/**
* Generate RSS XML
*/
function generateRssXml(items, siteConfig) {
const siteUrl = siteConfig.url + siteConfig.baseUrl
const now = new Date().toUTCString()

const itemsXml = items
.map(
(item) => ` <item>
<title>${escapeXml(item.title)}</title>
<link>${escapeXml(item.url)}</link>
<guid>${escapeXml(item.url)}</guid>
<pubDate>${item.date.toUTCString()}</pubDate>
<description>${escapeXml(item.excerpt)}</description>
</item>`
)
.join("\n")

return `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>${escapeXml(siteConfig.title)} Documentation</title>
<link>${escapeXml(siteUrl)}</link>
<description>${escapeXml(siteConfig.tagline)}</description>
<language>en</language>
<lastBuildDate>${now}</lastBuildDate>
<atom:link href="${escapeXml(siteUrl)}rss.xml" rel="self" type="application/rss+xml"/>
${itemsXml}
</channel>
</rss>`
}

/**
* Generate RSS feed items from documentation
*/
async function generateFeedItems(docsDir, siteConfig, useGitHubApi) {
const markdownFiles = getAllMarkdownFiles(docsDir)
const fileData = []

// Collect file metadata
for (const filePath of markdownFiles) {
try {
const fileContent = fs.readFileSync(filePath, "utf-8")
const { data: frontmatter, content } = matter(fileContent)

if (frontmatter.draft === true || frontmatter.changelog === false) {
continue
}

let title = frontmatter.title
if (!title) {
const headingMatch = content.match(/^#\s+(.+)$/m)
title = headingMatch
? headingMatch[1]
: path.basename(filePath, path.extname(filePath))
}

const excerpt = frontmatter.description || extractExcerpt(content)
const relativePath = path.relative(docsDir, filePath).replace(/\\/g, "/")
const repoPath = "documentation/" + relativePath
const dirPath = path.dirname(relativePath)

let urlPath
if (frontmatter.slug) {
if (frontmatter.slug.startsWith("/")) {
urlPath = frontmatter.slug.slice(1)
} else {
urlPath =
dirPath === "." ? frontmatter.slug : `${dirPath}/${frontmatter.slug}`
}
} else {
urlPath = relativePath.replace(/\.mdx?$/, "").replace(/\/index$/, "")
}

if (urlPath && !urlPath.endsWith("/")) {
urlPath += "/"
}

const baseUrl = siteConfig.baseUrl.endsWith("/")
? siteConfig.baseUrl
: siteConfig.baseUrl + "/"
const url = siteConfig.url + baseUrl + urlPath

fileData.push({ filePath, repoPath, title, url, excerpt })
} catch (err) {
console.warn(`[docs-rss] Error processing ${filePath}:`, err.message)
}
}

// Get dates
if (useGitHubApi) {
console.log("[docs-rss] Using GitHub API for file dates...")
const CONCURRENCY = 10
for (let i = 0; i < fileData.length; i += CONCURRENCY) {
const batch = fileData.slice(i, i + CONCURRENCY)
const dates = await Promise.all(
batch.map((f) => getGitHubLastModified(f.repoPath))
)
batch.forEach((f, idx) => {
f.date = dates[idx]
})
}
} else {
for (const f of fileData) {
f.date = getGitLastModified(f.filePath)
}
}

// Build final items
const items = fileData
.filter((f) => f.date)
.map((f) => ({
title: f.title,
url: f.url,
date: f.date,
excerpt: f.excerpt,
}))

items.sort((a, b) => b.date - a.date)
return items.slice(0, FEED_ITEMS_COUNT)
}

module.exports = function docsRssPlugin(context) {
return {
name: "docs-rss",

async loadContent() {
const { siteConfig } = context
const docsDir = path.join(context.siteDir, "documentation")
const staticDir = path.join(context.siteDir, "static")

if (!fs.existsSync(docsDir)) {
console.warn("[docs-rss] Documentation directory not found")
return []
}

console.log("[docs-rss] Generating RSS feed...")

const useGitHubApi = isShallowClone()
if (useGitHubApi) {
console.log("[docs-rss] Shallow clone detected, using GitHub API")
}

const recentItems = await generateFeedItems(docsDir, siteConfig, useGitHubApi)
const rssXml = generateRssXml(recentItems, siteConfig)
const rssPath = path.join(staticDir, "rss.xml")
fs.writeFileSync(rssPath, rssXml, "utf-8")

console.log(`[docs-rss] Generated RSS feed with ${recentItems.length} items`)

return recentItems.map((item) => ({
...item,
date: item.date.toISOString(),
}))
},

async contentLoaded({ content, actions }) {
const { setGlobalData } = actions
setGlobalData({ changelog: content || [] })
},
}
}
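
For reference, the feed that generateRssXml writes to static/rss.xml should look roughly like this; the title, URLs, dates, and description below are illustrative, not real output:

<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
  <channel>
    <title>QuestDB Documentation</title>
    <link>https://questdb.com/docs/</link>
    <description>(siteConfig.tagline)</description>
    <language>en</language>
    <lastBuildDate>Mon, 01 Jan 2024 00:00:00 GMT</lastBuildDate>
    <atom:link href="https://questdb.com/docs/rss.xml" rel="self" type="application/rss+xml"/>
    <item>
      <title>Documentation Changelog</title>
      <link>https://questdb.com/docs/changelog/</link>
      <guid>https://questdb.com/docs/changelog/</guid>
      <pubDate>Mon, 01 Jan 2024 00:00:00 GMT</pubDate>
      <description>Recently updated documentation pages for QuestDB</description>
    </item>
  </channel>
</rss>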