diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..d0febe6f --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,13 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file +version: 2 +updates: + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "monthly" + allow: + - dependency-name: "astro" + - dependency-name: "@astrojs/starlight" diff --git a/.github/scripts/languagetool_reviewdog.py b/.github/scripts/languagetool_reviewdog.py new file mode 100644 index 00000000..050dc37e --- /dev/null +++ b/.github/scripts/languagetool_reviewdog.py @@ -0,0 +1,185 @@ +#!/usr/bin/env python3 +import argparse +import json +import os +import re +import subprocess +from typing import Dict, List, Optional, Set, Tuple + +import requests + + +def sh(*args: str) -> str: + return subprocess.check_output(args, text=True).strip() + + +def offset_to_line_col(text: str, offset: int) -> Tuple[int, int]: + # reviewdog wants 1-based line/column + line = text.count("\n", 0, offset) + 1 + last_nl = text.rfind("\n", 0, offset) + col = offset - (last_nl + 1) + 1 + return line, col + + +def normalize_word(s: str) -> str: + s = re.sub(r"^[\W_]+|[\W_]+$", "", s, flags=re.UNICODE) + return s.lower() + + +def load_dictionary(path: str) -> Set[str]: + if not path or not os.path.exists(path): + return set() + words: Set[str] = set() + with open(path, "r", encoding="utf-8") as f: + for line in f: + line = line.strip() + if not line or line.startswith("#"): + continue + words.add(line.lower()) + return words + + +def changed_files(base_sha: str, head_sha: str) -> List[str]: + # list only changed files in the PR + out = sh("git", "diff", "--name-only", 
base_sha, head_sha) + files = [x.strip() for x in out.splitlines() if x.strip()] + return files + + +def is_text_file(path: str) -> bool: + ext = os.path.splitext(path)[1].lower() + return ext in {".md", ".txt", ".rst", ".adoc", ".asciidoc", ".tex"} or os.path.basename(path).lower() in { + "readme", "readme.md", "readme.txt" + } + + +def lt_check(api_url: str, language: str, text: str) -> Dict: + resp = requests.post( + api_url, + data={ + "language": language, + "text": text, + }, + timeout=60, + ) + resp.raise_for_status() + return resp.json() + + +def main() -> int: + ap = argparse.ArgumentParser() + ap.add_argument("--api-url", required=True) + ap.add_argument("--language", required=True) + ap.add_argument("--base-sha", required=True) + ap.add_argument("--head-sha", required=True) + ap.add_argument("--dictionary", default=".languagetool/words.txt") + ap.add_argument("--max-suggestions", type=int, default=3) + args = ap.parse_args() + + dict_words = load_dictionary(args.dictionary) + + files = changed_files(args.base_sha, args.head_sha) + files = [f for f in files if os.path.exists(f) and is_text_file(f)] + + diagnostics: List[Dict] = [] + + for path in files: + try: + with open(path, "r", encoding="utf-8") as f: + content = f.read() + except UnicodeDecodeError: + with open(path, "r", encoding="utf-8", errors="replace") as f: + content = f.read() + + if not content.strip(): + continue + + try: + result = lt_check(args.api_url, args.language, content) + except Exception as e: + # Emit a single diagnostic if the API call fails for a file + diagnostics.append( + { + "message": f"LanguageTool API error for {path}: {e}", + "location": {"path": path, "range": {"start": {"line": 1, "column": 1}}}, + "severity": "WARNING", + } + ) + continue + + matches = result.get("matches", []) + for m in matches: + offset = int(m.get("offset", 0)) + length = int(m.get("length", 0)) + bad = content[offset : offset + length] + + rule = m.get("rule", {}) or {} + rule_id = 
rule.get("id") or "UNKNOWN_RULE" + category = (rule.get("category", {}) or {}).get("id", "") + + # Cheap custom dictionary support without modifying LT server: + # if LT reports a spelling/typo-ish issue AND the token is in our dictionary -> ignore it. + # (Most spelling problems show up in category TYPOS and/or rule ids containing MORFOLOGIK.) + bad_norm = normalize_word(bad) + if dict_words and bad_norm: + looks_like_spelling = (category.upper() == "TYPOS") or ("MORFOLOGIK" in str(rule_id).upper()) + if looks_like_spelling and (bad_norm in dict_words): + continue + + start_line, start_col = offset_to_line_col(content, offset) + end_line, end_col = offset_to_line_col(content, offset + max(length, 0)) + + # Suggestions (as rdjson "suggestions" with ranges) + suggestions = [] + repls = m.get("replacements", []) or [] + for r in repls[: args.max_suggestions]: + val = r.get("value") + if not val: + continue + suggestions.append( + { + "range": { + "start": {"line": start_line, "column": start_col}, + "end": {"line": end_line, "column": end_col}, + }, + "text": val, + } + ) + + code = {"value": rule_id} + urls = rule.get("urls") or [] + if urls and isinstance(urls, list): + u = urls[0].get("value") + if u: + code["url"] = u + + diagnostics.append( + { + "message": m.get("message") or "LanguageTool finding", + "location": { + "path": path, + "range": { + "start": {"line": start_line, "column": start_col}, + "end": {"line": end_line, "column": end_col}, + }, + }, + "severity": "WARNING", + "code": code, + **({"suggestions": suggestions} if suggestions else {}), + } + ) + + rdjson = { + "source": { + "name": "LanguageTool", + "url": "https://languagetool.org", + }, + "diagnostics": diagnostics, + } + + print(json.dumps(rdjson)) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml deleted file mode 100644 index f9fa42a3..00000000 --- a/.github/workflows/deploy.yml +++ /dev/null @@ 
-1,43 +0,0 @@ -name: Deploy to GitHub Pages - -on: - # Trigger the workflow every time you push to the `main` branch - # Using a different branch name? Replace `main` with your branch’s name - push: - branches: [ main ] - # Allows you to run this workflow manually from the Actions tab on GitHub. - workflow_dispatch: - -# Allow this job to clone the repo and create a page deployment -permissions: - contents: read - pages: write - id-token: write - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout your repository using git - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Install, build, and upload your site - uses: withastro/action@v3 - with: - path: . # The root location of your Astro project inside the repository. (optional) - node-version: 20 # The specific version of Node that should be used to build your site. Defaults to 20. (optional) - package-manager: npm # The Node package manager that should be used to install dependencies and build your site. Automatically detected based on your lockfile. 
(optional) - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - deploy: - needs: build - runs-on: ubuntu-latest - environment: - name: github-pages - url: ${{ steps.deployment.outputs.page_url }} - steps: - - name: Deploy to GitHub Pages - id: deployment - uses: actions/deploy-pages@v4 diff --git a/.github/workflows/languagetool.yml b/.github/workflows/languagetool.yml new file mode 100644 index 00000000..9666f661 --- /dev/null +++ b/.github/workflows/languagetool.yml @@ -0,0 +1,84 @@ +name: Harper (grammar suggestions) + +on: + pull_request: + +permissions: + contents: read + pull-requests: write + +jobs: + harper: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install Harper + run: | + curl -fsSL https://raw.githubusercontent.com/automattic/harper/main/install.sh | sh + echo "$HOME/.harper/bin" >> $GITHUB_PATH + + - name: Run Harper on changed md/mdx + id: harper + run: | + set -euo pipefail + + mapfile -t FILES < <( + git diff --name-only origin/${{ github.base_ref }}...${{ github.sha }} \ + -- '*.md' '*.mdx' || true + ) + + if [ ${#FILES[@]} -eq 0 ]; then + echo "No markdown files changed." 
> harper.txt + exit 0 + fi + + { + echo "## Harper grammar suggestions" + echo + for f in "${FILES[@]}"; do + [ -f "$f" ] || continue + echo "### $f" + harper "$f" || true + echo + done + } > harper.txt + + - name: Post PR comment + uses: actions/github-script@v7 + with: + script: | + const fs = require("fs"); + const body = fs.readFileSync("harper.txt", "utf8").trim(); + + const marker = ""; + const commentBody = `${marker}\n${body || "No issues found."}`; + + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + }); + + const existing = comments.find(c => + c.body && c.body.includes(marker) + ); + + if (existing) { + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existing.id, + body: commentBody, + }); + } else { + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + body: commentBody, + }); + } diff --git a/src/content/docs/Users/System Management/moss-state-management.mdx b/src/content/docs/Users/System Management/moss-state-management.mdx index 58d3b5e3..03920cf9 100644 --- a/src/content/docs/Users/System Management/moss-state-management.mdx +++ b/src/content/docs/Users/System Management/moss-state-management.mdx @@ -15,7 +15,7 @@ Use the commands below to inspect and manage those states, discover software, an ```bash moss state active ``` - + There may be multiple versions available with different desktop environments denoted by `AerynOS-<version>-<arch>-<desktop>.iso` where `<desktop>` is the desktop environment. 2. Review the state history when you need context for a rollback. ```bash